[ 686.715845] env[68285]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=68285) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 686.716213] env[68285]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=68285) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 686.716339] env[68285]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=68285) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 686.716608] env[68285]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs [ 686.812914] env[68285]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68285) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}} [ 686.822567] env[68285]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=68285) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}} [ 686.865481] env[68285]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative [ 687.424640] env[68285]: INFO nova.virt.driver [None req-935f8585-057e-42c6-8dae-5785ccf8234b None None] Loading compute driver 'vmwareapi.VMwareVCDriver' [ 687.494100] env[68285]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.494274] env[68285]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.494345] env[68285]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68285) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}} [ 690.386461] env[68285]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-6d20d350-e1df-47d4-9967-39c586797cfd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.402802] env[68285]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68285) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}} [ 690.402914] env[68285]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-6cf5976d-5777-4004-87ba-d15ddf0251cd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.436450] env[68285]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 9bbb4. 
[ 690.436584] env[68285]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.942s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.437149] env[68285]: INFO nova.virt.vmwareapi.driver [None req-935f8585-057e-42c6-8dae-5785ccf8234b None None] VMware vCenter version: 7.0.3 [ 690.440709] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e613c4-3ea2-4462-b0b0-27c4d6b3d1b7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.457568] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f31a1e-a1a3-4c5f-811f-29b0bb956645 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.463412] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac31b797-852c-4903-866b-8cce6f535655 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.469993] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad64df19-84ee-4cd1-83fa-63d43b23a50e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.482902] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c8ee5e-b3d8-4842-8fd1-a14512bc8a5a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.488922] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1480696-e0f3-48d4-b56e-d0ab7cef5b5d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.519145] env[68285]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-d059fd6f-1d21-4073-a169-798a278e512c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.524059] env[68285]: DEBUG nova.virt.vmwareapi.driver [None req-935f8585-057e-42c6-8dae-5785ccf8234b None None] Extension org.openstack.compute already exists. {{(pid=68285) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}} [ 690.527044] env[68285]: INFO nova.compute.provider_config [None req-935f8585-057e-42c6-8dae-5785ccf8234b None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access. 
[ 691.030539] env[68285]: DEBUG nova.context [None req-935f8585-057e-42c6-8dae-5785ccf8234b None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),c9f796c3-2b1f-46f3-9d35-9a1afc978dd6(cell1) {{(pid=68285) load_cells /opt/stack/nova/nova/context.py:464}} [ 691.032718] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.033046] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.034089] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.034732] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Acquiring lock "c9f796c3-2b1f-46f3-9d35-9a1afc978dd6" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.035037] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Lock "c9f796c3-2b1f-46f3-9d35-9a1afc978dd6" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.036487] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Lock "c9f796c3-2b1f-46f3-9d35-9a1afc978dd6" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.057930] env[68285]: INFO dbcounter [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Registered counter for database nova_cell0 [ 691.066219] env[68285]: INFO dbcounter [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Registered counter for database nova_cell1 [ 691.507323] env[68285]: DEBUG oslo_db.sqlalchemy.engines [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68285) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}} [ 691.507787] env[68285]: DEBUG oslo_db.sqlalchemy.engines [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68285) _check_effective_sql_mode 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}} [ 691.512802] env[68285]: ERROR nova.db.main.api [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main [ 691.512802] env[68285]: result = function(*args, **kwargs) [ 691.512802] env[68285]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 691.512802] env[68285]: return func(*args, **kwargs) [ 691.512802] env[68285]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 691.512802] env[68285]: result = fn(*args, **kwargs) [ 691.512802] env[68285]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 691.512802] env[68285]: return f(*args, **kwargs) [ 691.512802] env[68285]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version [ 691.512802] env[68285]: return db.service_get_minimum_version(context, binaries) [ 691.512802] env[68285]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 691.512802] env[68285]: _check_db_access() [ 691.512802] env[68285]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 691.512802] env[68285]: stacktrace = ''.join(traceback.format_stack()) [ 691.512802] env[68285]: [ 691.513631] env[68285]: ERROR nova.db.main.api [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main [ 691.513631] env[68285]: result = function(*args, **kwargs) [ 691.513631] env[68285]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 691.513631] env[68285]: return func(*args, **kwargs) [ 691.513631] env[68285]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 691.513631] env[68285]: result = fn(*args, **kwargs) [ 691.513631] env[68285]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 691.513631] env[68285]: return f(*args, **kwargs) [ 691.513631] env[68285]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version [ 691.513631] env[68285]: return db.service_get_minimum_version(context, binaries) [ 691.513631] env[68285]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 691.513631] env[68285]: _check_db_access() [ 691.513631] env[68285]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 691.513631] env[68285]: stacktrace = ''.join(traceback.format_stack()) [ 691.513631] env[68285]: [ 691.514043] env[68285]: WARNING nova.objects.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000 [ 691.514166] env[68285]: WARNING nova.objects.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Failed to get minimum service version for cell c9f796c3-2b1f-46f3-9d35-9a1afc978dd6 [ 691.514648] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Acquiring lock "singleton_lock" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.514870] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Acquired lock "singleton_lock" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
691.515069] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Releasing lock "singleton_lock" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.515399] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Full set of CONF: {{(pid=68285) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 691.515543] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ******************************************************************************** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 691.515672] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] Configuration options gathered from: {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 691.515807] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 691.516046] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 691.516185] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ================================================================================ {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 691.516399] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] allow_resize_to_same_host = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.516573] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] arq_binding_timeout = 300 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.516710] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] backdoor_port = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.516841] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] backdoor_socket = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.517012] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] block_device_allocate_retries = 60 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.517184] env[68285]: DEBUG oslo_service.backend.eventlet.service [None 
req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] block_device_allocate_retries_interval = 3 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.517356] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cert = self.pem {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.517525] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.517692] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute_monitors = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.517858] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] config_dir = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.518290] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] config_drive_format = iso9660 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.518444] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.518623] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] config_source = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.518796] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] console_host = devstack {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.518964] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] control_exchange = nova {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.519144] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cpu_allocation_ratio = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.519312] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] daemon = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.519484] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] debug = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.519640] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] default_access_ip_network_name = None {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.519805] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] default_availability_zone = nova {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.519961] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] default_ephemeral_format = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.520142] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] default_green_pool_size = 1000 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.520386] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.520555] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] default_schedule_zone = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.520712] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] disk_allocation_ratio = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.520874] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] enable_new_services = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.521065] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] enabled_apis = ['osapi_compute'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.521231] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] enabled_ssl_apis = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.521394] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] flat_injected = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.521554] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] force_config_drive = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.521713] env[68285]: DEBUG 
oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] force_raw_images = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.521888] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] graceful_shutdown_timeout = 5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.522059] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] heal_instance_info_cache_interval = -1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.522284] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] host = cpu-1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.522462] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.522625] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] initial_disk_allocation_ratio = 1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.522786] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] initial_ram_allocation_ratio = 1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.522995] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.523173] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] instance_build_timeout = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.523335] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] instance_delete_interval = 300 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.523501] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] instance_format = [instance: %(uuid)s] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.523665] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] instance_name_template = instance-%08x {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.523824] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] instance_usage_audit = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.523990] env[68285]: DEBUG oslo_service.backend.eventlet.service [None 
req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] instance_usage_audit_period = month {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.524168] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.524331] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] instances_path = /opt/stack/data/nova/instances {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.524494] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] internal_service_availability_zone = internal {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.524651] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] key = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.524811] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] live_migration_retry_count = 30 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.524980] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] log_color = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.525164] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] log_config_append = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.525334] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.525494] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] log_dir = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.525651] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] log_file = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.525779] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] log_options = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.525961] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] log_rotate_interval = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.526147] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] log_rotate_interval_type = days {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.526323] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] log_rotation_type = none {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.526480] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.526612] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.526785] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.526953] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.527096] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.527259] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] long_rpc_timeout = 1800 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.527418] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] max_concurrent_builds = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.527575] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] max_concurrent_live_migrations = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.527731] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] max_concurrent_snapshots = 5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.527887] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] max_local_block_devices = 3 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.528055] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] max_logfile_count = 30 {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.528215] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] max_logfile_size_mb = 200 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.528370] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] maximum_instance_delete_attempts = 5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.528533] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] metadata_listen = 0.0.0.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.528696] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] metadata_listen_port = 8775 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.528861] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] metadata_workers = 2 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.529030] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] migrate_max_retries = -1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.529202] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] mkisofs_cmd = genisoimage {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.529406] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] my_block_storage_ip = 10.180.1.21 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.529538] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] my_ip = 10.180.1.21 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.529741] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.529903] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] network_allocate_retries = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.530089] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.530260] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] osapi_compute_listen = 0.0.0.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.530421] env[68285]: DEBUG 
oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] osapi_compute_listen_port = 8774 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.530584] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] osapi_compute_unique_server_name_scope = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.530749] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] osapi_compute_workers = 2 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.530907] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] password_length = 12 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.531079] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] periodic_enable = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.531239] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] periodic_fuzzy_delay = 60 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.531404] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] pointer_model = usbtablet {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.531569] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] preallocate_images = none {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.531729] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] publish_errors = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.531857] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] pybasedir = /opt/stack/nova {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.532027] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ram_allocation_ratio = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.532184] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] rate_limit_burst = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.532345] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] rate_limit_except_level = CRITICAL {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.532499] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] rate_limit_interval = 0 {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.532651] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] reboot_timeout = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.532803] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] reclaim_instance_interval = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.532953] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] record = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.533133] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] reimage_timeout_per_gb = 60 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.533295] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] report_interval = 120 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.533452] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] rescue_timeout = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.533607] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] reserved_host_cpus = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.533763] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] reserved_host_disk_mb = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.533919] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] reserved_host_memory_mb = 512 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.534087] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] reserved_huge_pages = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.534246] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] resize_confirm_window = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.534403] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] resize_fs_using_block_device = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.534556] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] resume_guests_state_on_host_boot = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.534721] env[68285]: DEBUG oslo_service.backend.eventlet.service [None 
req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.534878] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] rpc_response_timeout = 60 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.535046] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] run_external_periodic_tasks = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.535213] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] running_deleted_instance_action = reap {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.535368] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] running_deleted_instance_poll_interval = 1800 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.535520] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] running_deleted_instance_timeout = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.535673] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] scheduler_instance_sync_interval = 120 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.535839] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] service_down_time = 720 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.536041] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] servicegroup_driver = db {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.536215] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] shell_completion = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.536374] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] shelved_offload_time = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.536530] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] shelved_poll_interval = 3600 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.536697] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] shutdown_timeout = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.536856] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] source_is_ipv6 = False {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.537018] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ssl_only = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.537312] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.537491] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] sync_power_state_interval = 600 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.537653] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] sync_power_state_pool_size = 1000 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.537819] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] syslog_log_facility = LOG_USER {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.537973] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] tempdir = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.538150] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] timeout_nbd = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.538318] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] transport_url = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.538475] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] update_resources_interval = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.538630] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] use_cow_images = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.538787] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] use_journal = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.538944] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] use_json = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.539114] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] use_rootwrap_daemon = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.539273] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] 
use_stderr = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.539430] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] use_syslog = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.539581] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vcpu_pin_set = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.539745] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vif_plugging_is_fatal = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.539910] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vif_plugging_timeout = 300 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.540083] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] virt_mkfs = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.540245] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] volume_usage_poll_interval = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.540411] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] watch_log_file = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.540581] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] web = /usr/share/spice-html5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 691.540764] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.540927] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.541105] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.541276] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_concurrency.disable_process_locking = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.541573] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.541754] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.541925] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.542111] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.542284] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.542479] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.542731] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.auth_strategy = keystone {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.542906] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.compute_link_prefix = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.543095] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.543273] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.dhcp_domain = novalocal {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.543443] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.enable_instance_password = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.543608] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.glance_link_prefix = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.543771] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.543944] env[68285]: DEBUG oslo_service.backend.eventlet.service [None 
req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.544125] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.instance_list_per_project_cells = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.544289] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.list_records_by_skipping_down_cells = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.544451] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.local_metadata_per_cell = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.544620] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.max_limit = 1000 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.544788] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.metadata_cache_expiration = 15 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.544963] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.neutron_default_tenant_id = default {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.545153] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.response_validation = warn {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.545329] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.use_neutron_default_nets = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.545498] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.545660] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.545828] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.546034] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.546214] env[68285]: DEBUG oslo_service.backend.eventlet.service [None 
req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.vendordata_dynamic_targets = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.546378] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.vendordata_jsonfile_path = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.546558] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.546751] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.backend = dogpile.cache.memcached {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.546922] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.backend_argument = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.547092] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.backend_expiration_time = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.547264] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.config_prefix = cache.oslo {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.547433] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.dead_timeout = 60.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.547597] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.debug_cache_backend = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.547758] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.enable_retry_client = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.547918] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.enable_socket_keepalive = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.548100] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.enabled = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.548291] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.enforce_fips_mode = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.548461] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.expiration_time = 600 
{{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.548621] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.hashclient_retry_attempts = 2 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.548786] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.hashclient_retry_delay = 1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.548946] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.memcache_dead_retry = 300 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.549117] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.memcache_password = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.549282] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.549445] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.549602] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.memcache_pool_maxsize = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.549763] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.549942] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.memcache_sasl_enabled = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.550142] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.550312] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.memcache_socket_timeout = 1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.550473] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.memcache_username = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.550637] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.proxies = [] {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.550799] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.redis_db = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.550957] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.redis_password = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.551145] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.redis_sentinel_service_name = mymaster {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.551321] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.551487] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.redis_server = localhost:6379 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.551651] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.redis_socket_timeout = 1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.551810] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.redis_username = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.551972] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.retry_attempts = 2 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.552152] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.retry_delay = 0.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.552313] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.socket_keepalive_count = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.552473] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.socket_keepalive_idle = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.552632] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.socket_keepalive_interval = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.552788] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.tls_allowed_ciphers = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.552946] env[68285]: DEBUG 
oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.tls_cafile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.553115] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.tls_certfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.553280] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.tls_enabled = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.553437] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cache.tls_keyfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.553662] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.auth_section = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.553856] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.auth_type = password {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.554034] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.cafile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.554217] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.catalog_info = volumev3::publicURL {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.554378] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.certfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.554542] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.collect_timing = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.554702] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.cross_az_attach = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.554861] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.debug = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.555031] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.endpoint_template = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.555200] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.http_retries = 3 {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.555361] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.insecure = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.555518] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.keyfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.555691] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.os_region_name = RegionOne {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.555877] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.split_loggers = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.556055] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cinder.timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.556234] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.556396] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute.cpu_dedicated_set = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.556554] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute.cpu_shared_set = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.556714] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute.image_type_exclude_list = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.556897] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.557087] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute.max_concurrent_disk_ops = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.557257] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute.max_disk_devices_to_attach = -1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.557420] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.557591] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.557754] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute.resource_provider_association_refresh = 300 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.557916] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.558090] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute.shutdown_retry_interval = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.558302] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.558492] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] conductor.workers = 2 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.558673] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] console.allowed_origins = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.558833] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] console.ssl_ciphers = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.559049] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] console.ssl_minimum_version = default {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.559235] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] consoleauth.enforce_session_timeout = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.559406] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] consoleauth.token_ttl = 600 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.559578] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.cafile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.559738] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.certfile = None {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.559899] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.collect_timing = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.560071] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.connect_retries = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.560234] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.connect_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.560394] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.endpoint_override = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.560554] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.insecure = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.560712] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.keyfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.560869] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.max_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.561036] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.min_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.561200] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.region_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.561357] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.retriable_status_codes = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.561513] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.service_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.561681] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.service_type = accelerator {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.561841] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.split_loggers = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.561999] env[68285]: DEBUG oslo_service.backend.eventlet.service 
[None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.status_code_retries = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.562170] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.status_code_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.562325] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.562503] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.562660] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] cyborg.version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.562827] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.asyncio_connection = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.562986] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.asyncio_slave_connection = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.563173] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.backend = sqlalchemy {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.563343] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.connection = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.563510] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.connection_debug = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.563679] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.connection_parameters = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.563845] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.connection_recycle_time = 3600 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.564014] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.connection_trace = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.564184] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.db_inc_retry_interval = 
True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.564348] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.db_max_retries = 20 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.564508] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.db_max_retry_interval = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.564667] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.db_retry_interval = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.564827] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.max_overflow = 50 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.564985] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.max_pool_size = 5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.565158] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.max_retries = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.565329] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.565487] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.mysql_wsrep_sync_wait = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.565644] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.pool_timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.565803] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.retry_interval = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.565993] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.slave_connection = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.566177] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.sqlite_synchronous = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.566338] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] database.use_db_reconnect = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
691.566503] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.asyncio_connection = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.566660] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.asyncio_slave_connection = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.566839] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.backend = sqlalchemy {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.567046] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.connection = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.567221] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.connection_debug = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.567391] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.connection_parameters = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.567555] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.connection_recycle_time = 3600 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.567717] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.connection_trace = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.567878] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.db_inc_retry_interval = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.568056] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.db_max_retries = 20 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.568244] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.db_max_retry_interval = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.568417] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.db_retry_interval = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.568580] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.max_overflow = 50 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.568741] env[68285]: DEBUG 
oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.max_pool_size = 5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.568921] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.max_retries = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.569127] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.569291] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.569450] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.pool_timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.569613] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.retry_interval = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.569770] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.slave_connection = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.569930] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] api_database.sqlite_synchronous = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.570119] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] devices.enabled_mdev_types = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.570320] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.570500] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ephemeral_storage_encryption.default_format = luks {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.570664] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ephemeral_storage_encryption.enabled = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.570825] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.571031] env[68285]: DEBUG 
oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.api_servers = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.571217] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.cafile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.571378] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.certfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.571541] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.collect_timing = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.571701] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.connect_retries = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.571857] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.connect_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.572030] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.debug = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.572203] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.default_trusted_certificate_ids = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.572365] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.enable_certificate_validation = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.572525] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.enable_rbd_download = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.572683] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.endpoint_override = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.572849] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.insecure = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.573027] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.keyfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.573194] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.max_version = None {{(pid=68285) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.573352] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.min_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.573515] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.num_retries = 3 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.573686] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.rbd_ceph_conf = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.573849] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.rbd_connect_timeout = 5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.574028] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.rbd_pool = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.574201] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.rbd_user = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.574368] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.region_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.574524] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.retriable_status_codes = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.574679] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.service_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.574847] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.service_type = image {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.575021] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.split_loggers = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.575178] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.status_code_retries = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.575331] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.status_code_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.575486] env[68285]: DEBUG oslo_service.backend.eventlet.service [None 
req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.575663] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.575831] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.verify_glance_signatures = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.576012] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] glance.version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.576191] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] guestfs.debug = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.576358] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.auth_section = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.576521] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.auth_type = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.576678] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.cafile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.576845] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.certfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.577041] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.collect_timing = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.577209] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.connect_retries = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.577368] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.connect_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.577526] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.endpoint_override = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.577687] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.insecure = False {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.577844] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.keyfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.578010] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.max_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.578175] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.min_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.578331] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.region_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.578487] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.retriable_status_codes = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.578642] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.service_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.578809] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.service_type = shared-file-system {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.578973] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.share_apply_policy_timeout = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.579149] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.split_loggers = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.579307] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.status_code_retries = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.579461] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.status_code_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.579616] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.579794] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.579952] env[68285]: DEBUG 
oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] manila.version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.580136] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] mks.enabled = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.580484] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.580672] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] image_cache.manager_interval = 2400 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.580841] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] image_cache.precache_concurrency = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.581022] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] image_cache.remove_unused_base_images = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.581197] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.581364] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.581539] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] image_cache.subdirectory_name = _base {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.581713] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.api_max_retries = 60 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.581877] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.api_retry_interval = 2 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.582048] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.auth_section = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.582215] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.auth_type = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.582375] env[68285]: DEBUG oslo_service.backend.eventlet.service [None 
req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.cafile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.582534] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.certfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.582695] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.collect_timing = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.582859] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.conductor_group = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.583077] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.connect_retries = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.583255] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.connect_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.583417] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.endpoint_override = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.583579] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.insecure = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.583739] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.keyfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.583897] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.max_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.584067] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.min_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.584235] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.peer_list = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.584395] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.region_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.584553] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.retriable_status_codes = None {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.584715] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.serial_console_state_timeout = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.584873] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.service_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.585056] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.service_type = baremetal {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.585221] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.shard = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.585383] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.split_loggers = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.585542] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.status_code_retries = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.585698] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.status_code_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.585868] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.586081] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.586251] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ironic.version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.586435] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.586610] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] key_manager.fixed_key = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.586793] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.586990] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.barbican_api_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.587176] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.barbican_endpoint = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.587349] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.barbican_endpoint_type = public {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.587510] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.barbican_region_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.587667] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.cafile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.587824] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.certfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.587990] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.collect_timing = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.588164] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.insecure = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.588321] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.keyfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.588479] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.number_of_retries = 60 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.588636] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.retry_delay = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.588797] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.send_service_user_token = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.588956] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.split_loggers = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.589125] env[68285]: DEBUG 
oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.589286] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.verify_ssl = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.589443] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican.verify_ssl_path = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.589606] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican_service_user.auth_section = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.589767] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican_service_user.auth_type = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.589925] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican_service_user.cafile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.590092] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican_service_user.certfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.590258] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican_service_user.collect_timing = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.590420] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican_service_user.insecure = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.590575] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican_service_user.keyfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.590737] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican_service_user.split_loggers = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.590892] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] barbican_service_user.timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.591076] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vault.approle_role_id = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.591238] env[68285]: DEBUG oslo_service.backend.eventlet.service [None 
req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vault.approle_secret_id = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.591408] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vault.kv_mountpoint = secret {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.591567] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vault.kv_path = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.591729] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vault.kv_version = 2 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.591884] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vault.namespace = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.592068] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vault.root_token_id = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.592235] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vault.ssl_ca_crt_file = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.592400] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vault.timeout = 60.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.592562] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vault.use_ssl = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.592731] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.592898] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.cafile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.593079] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.certfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.593250] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.collect_timing = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.593406] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.connect_retries = None {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.593563] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.connect_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.593723] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.endpoint_override = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.593883] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.insecure = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.594071] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.keyfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.594235] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.max_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.594389] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.min_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.594546] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.region_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.594705] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.retriable_status_codes = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.594866] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.service_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.595044] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.service_type = identity {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.595210] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.split_loggers = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.595367] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.status_code_retries = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.595525] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.status_code_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.595681] env[68285]: DEBUG 
oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.595879] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.596075] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] keystone.version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.596277] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.ceph_mount_options = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.596688] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.596898] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.connection_uri = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.597094] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.cpu_mode = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.597271] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.cpu_model_extra_flags = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.597441] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.cpu_models = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.597614] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.cpu_power_governor_high = performance {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.597786] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.cpu_power_governor_low = powersave {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.597950] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.cpu_power_management = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.598139] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.598320] env[68285]: DEBUG oslo_service.backend.eventlet.service [None 
req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.device_detach_attempts = 8 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.598488] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.device_detach_timeout = 20 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.598656] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.disk_cachemodes = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.598818] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.disk_prefix = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.598982] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.enabled_perf_events = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.599181] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.file_backed_memory = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.599349] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.gid_maps = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.599508] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.hw_disk_discard = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.599664] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.hw_machine_type = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.599834] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.images_rbd_ceph_conf = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.600008] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.600178] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.600348] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.images_rbd_glance_store_name = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.600518] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.images_rbd_pool = rbd 
{{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.600687] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.images_type = default {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.600845] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.images_volume_group = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.601014] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.inject_key = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.601183] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.inject_partition = -2 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.601344] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.inject_password = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.601505] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.iscsi_iface = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.601667] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.iser_use_multipath = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.601827] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.live_migration_bandwidth = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.601987] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.602164] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.live_migration_downtime = 500 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.602327] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.602485] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.602641] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.live_migration_inbound_addr = None {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.602803] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.602962] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.live_migration_permit_post_copy = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.603139] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.live_migration_scheme = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.603312] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.live_migration_timeout_action = abort {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.603476] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.live_migration_tunnelled = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.603637] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.live_migration_uri = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.603836] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.live_migration_with_native_tls = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.604032] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.max_queues = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.604206] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.604435] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.604601] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.nfs_mount_options = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.604885] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.605081] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68285) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.605260] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.num_iser_scan_tries = 5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.605419] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.num_memory_encrypted_guests = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.605582] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.605776] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.num_pcie_ports = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.605952] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.num_volume_scan_tries = 5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.606137] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.pmem_namespaces = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.606301] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.quobyte_client_cfg = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.606595] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.606769] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.rbd_connect_timeout = 5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.606935] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.607111] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.607275] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.rbd_secret_uuid = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.607432] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.rbd_user = None {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.607592] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.607764] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.remote_filesystem_transport = ssh {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.607922] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.rescue_image_id = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.608093] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.rescue_kernel_id = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.608255] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.rescue_ramdisk_id = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.608423] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.608584] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.rx_queue_size = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.608751] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.smbfs_mount_options = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.609053] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.609231] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.snapshot_compression = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.609393] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.snapshot_image_format = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.609615] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.609781] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.sparse_logical_volumes = False {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.609946] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.swtpm_enabled = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.610129] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.swtpm_group = tss {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.610298] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.swtpm_user = tss {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.610465] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.sysinfo_serial = unique {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.610623] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.tb_cache_size = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.610777] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.tx_queue_size = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.610938] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.uid_maps = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.611111] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.use_virtio_for_bridges = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.611282] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.virt_type = kvm {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.611447] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.volume_clear = zero {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.611607] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.volume_clear_size = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.611767] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.volume_enforce_multipath = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.611929] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.volume_use_multipath = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.612102] env[68285]: DEBUG 
oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.vzstorage_cache_path = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.612272] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.612437] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.vzstorage_mount_group = qemu {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.612599] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.vzstorage_mount_opts = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.612764] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.613074] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.613258] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.vzstorage_mount_user = stack {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.613422] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.613592] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.auth_section = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.613790] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.auth_type = password {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.613963] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.cafile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.614140] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.certfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.614305] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.collect_timing = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.614462] env[68285]: DEBUG 
oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.connect_retries = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.614618] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.connect_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.614788] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.default_floating_pool = public {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.614946] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.endpoint_override = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.615127] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.extension_sync_interval = 600 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.615285] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.http_retries = 3 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.615446] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.insecure = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.615604] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.keyfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.615761] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.max_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.615960] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.616145] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.min_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.616319] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.ovs_bridge = br-int {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.616484] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.physnets = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.616654] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.region_name = RegionOne 
{{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.616817] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.retriable_status_codes = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.616987] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.service_metadata_proxy = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.617163] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.service_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.617333] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.service_type = network {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.617494] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.split_loggers = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.617653] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.status_code_retries = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.617809] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.status_code_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.617966] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.618167] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.618345] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] neutron.version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.618521] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] notifications.bdms_in_notifications = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.618698] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] notifications.default_level = INFO {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.618862] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] notifications.include_share_mapping = False {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.619066] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] notifications.notification_format = unversioned {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.619278] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] notifications.notify_on_state_change = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.619463] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.619641] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] pci.alias = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.619810] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] pci.device_spec = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.619974] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] pci.report_in_placement = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.620163] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.auth_section = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.620337] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.auth_type = password {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.620507] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.620665] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.cafile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.620825] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.certfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.620988] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.collect_timing = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.621164] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.connect_retries = None {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.621322] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.connect_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.621479] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.default_domain_id = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.621635] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.default_domain_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.621790] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.domain_id = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.621944] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.domain_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.622114] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.endpoint_override = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.622302] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.insecure = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.622472] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.keyfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.622633] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.max_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.622787] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.min_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.622955] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.password = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.623132] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.project_domain_id = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.623302] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.project_domain_name = Default {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.623467] env[68285]: DEBUG 
oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.project_id = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.623640] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.project_name = service {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.623830] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.region_name = RegionOne {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.624017] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.retriable_status_codes = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.624184] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.service_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.624353] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.service_type = placement {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.624514] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.split_loggers = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.624673] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.status_code_retries = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.624830] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.status_code_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.624986] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.system_scope = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.625159] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.625317] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.trust_id = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.625472] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.user_domain_id = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.625640] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] 
placement.user_domain_name = Default {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.625802] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.user_id = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.626037] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.username = nova {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.626233] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.626397] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] placement.version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.626577] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.cores = 20 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.626741] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.count_usage_from_placement = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.626910] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.627088] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.injected_file_content_bytes = 10240 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.627259] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.injected_file_path_length = 255 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.627423] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.injected_files = 5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.627588] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.instances = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.627753] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.key_pairs = 100 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.627918] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.metadata_items = 128 {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.628101] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.ram = 51200 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.628293] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.recheck_quota = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.628470] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.server_group_members = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.628636] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.server_groups = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.628843] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.629028] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] quota.unified_limits_resource_strategy = require {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.629207] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.629371] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.629532] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] scheduler.image_metadata_prefilter = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.629691] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.629851] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] scheduler.max_attempts = 3 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.630025] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] scheduler.max_placement_results = 1000 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.630195] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.630357] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] scheduler.query_placement_for_image_type_support = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.630519] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.630689] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] scheduler.workers = 2 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.630867] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.631049] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.631229] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.631397] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.631560] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.631724] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.631887] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.632088] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.632257] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] 
filter_scheduler.host_subset_size = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.632420] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.632581] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.632742] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.632904] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.633092] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.633257] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.isolated_hosts = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.633417] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.isolated_images = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.633574] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.633734] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.633917] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.634093] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.pci_in_placement = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.634257] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.634443] env[68285]: DEBUG 
oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.634608] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.634767] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.634928] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.635103] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.635266] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.track_instance_changes = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.635443] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.635612] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] metrics.required = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.635775] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] metrics.weight_multiplier = 1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.635965] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.636165] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] metrics.weight_setting = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.636491] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.636669] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] serial_console.enabled = False {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.636845] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] serial_console.port_range = 10000:20000 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.637028] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.637205] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.637373] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] serial_console.serialproxy_port = 6083 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.637539] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] service_user.auth_section = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.637714] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] service_user.auth_type = password {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.637875] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] service_user.cafile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.638044] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] service_user.certfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.638237] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] service_user.collect_timing = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.638413] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] service_user.insecure = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.638574] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] service_user.keyfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.638746] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] service_user.send_service_user_token = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.638911] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] service_user.split_loggers = False {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.639085] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] service_user.timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.639261] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] spice.agent_enabled = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.639422] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] spice.enabled = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.639713] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.639913] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.640103] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] spice.html5proxy_port = 6082 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.640270] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] spice.image_compression = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.640430] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] spice.jpeg_compression = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.640589] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] spice.playback_compression = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.640750] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] spice.require_secure = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.640918] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] spice.server_listen = 127.0.0.1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.641100] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.641374] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.641546] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] spice.streaming_mode = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.641708] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] spice.zlib_compression = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.641875] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] upgrade_levels.baseapi = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.642060] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] upgrade_levels.compute = auto {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.642224] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] upgrade_levels.conductor = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.642385] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] upgrade_levels.scheduler = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.642546] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vendordata_dynamic_auth.auth_section = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.642708] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vendordata_dynamic_auth.auth_type = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.642866] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vendordata_dynamic_auth.cafile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.643033] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vendordata_dynamic_auth.certfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.643201] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.643360] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vendordata_dynamic_auth.insecure = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.643515] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vendordata_dynamic_auth.keyfile = None {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.643674] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.643856] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vendordata_dynamic_auth.timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.644051] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.api_retry_count = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.644216] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.ca_file = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.644388] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.cache_prefix = devstack-image-cache {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.644557] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.cluster_name = testcl1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.644720] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.connection_pool_size = 10 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.644876] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.console_delay_seconds = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.645058] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.datastore_regex = ^datastore.* {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.645268] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.645452] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.host_password = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.645620] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.host_port = 443 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.645788] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.host_username = administrator@vsphere.local {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.645989] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.insecure = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.646170] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.integration_bridge = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.646354] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.maximum_objects = 100 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.646531] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.pbm_default_policy = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.646700] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.pbm_enabled = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.646860] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.pbm_wsdl_location = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.647041] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.647207] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.serial_port_proxy_uri = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.647365] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.serial_port_service_uri = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.647531] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.task_poll_interval = 0.5 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.647704] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.use_linked_clone = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.647871] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.vnc_keymap = en-us {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.648047] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.vnc_port = 5900 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.648220] env[68285]: DEBUG 
oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vmware.vnc_port_total = 10000 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.648397] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vnc.auth_schemes = ['none'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.648572] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vnc.enabled = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.648851] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.649048] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.649223] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vnc.novncproxy_port = 6080 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.649412] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vnc.server_listen = 127.0.0.1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.649591] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.649753] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vnc.vencrypt_ca_certs = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.649913] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vnc.vencrypt_client_cert = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.650092] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vnc.vencrypt_client_key = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.650272] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.650436] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.disable_deep_image_inspection = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.650596] env[68285]: DEBUG oslo_service.backend.eventlet.service [None 
req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.650756] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.650917] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.651089] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.disable_rootwrap = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.651253] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.enable_numa_live_migration = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.651414] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.651573] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.651732] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.651888] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.libvirt_disable_apic = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.652056] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.652227] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.652512] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.652684] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.652846] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.653015] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.653184] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.653342] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.653498] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.653660] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.653869] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.654058] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] wsgi.client_socket_timeout = 900 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.654227] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] wsgi.default_pool_size = 1000 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.654392] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] wsgi.keep_alive = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.654555] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] wsgi.max_header_line = 16384 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.654714] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] wsgi.secure_proxy_ssl_header = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.654871] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] wsgi.ssl_ca_file = None 
{{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.655039] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] wsgi.ssl_cert_file = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.655201] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] wsgi.ssl_key_file = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.655369] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] wsgi.tcp_keepidle = 600 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.655535] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.655698] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] zvm.ca_file = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.655872] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] zvm.cloud_connector_url = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.656191] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.656368] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] zvm.reachable_timeout = 300 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.656539] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.656715] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.656891] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] profiler.connection_string = messaging:// {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.657069] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] profiler.enabled = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.657242] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] 
profiler.es_doc_type = notification {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.657406] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] profiler.es_scroll_size = 10000 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.657572] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] profiler.es_scroll_time = 2m {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.657732] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] profiler.filter_error_trace = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.657897] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] profiler.hmac_keys = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.658076] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] profiler.sentinel_service_name = mymaster {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.658266] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] profiler.socket_timeout = 0.1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.658440] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] profiler.trace_requests = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.658601] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] profiler.trace_sqlalchemy = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.658782] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] profiler_jaeger.process_tags = {} {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.658943] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] profiler_jaeger.service_name_prefix = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.659119] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] profiler_otlp.service_name_prefix = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.659285] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] remote_debug.host = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.659440] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] remote_debug.port = None {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.659611] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.659769] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.659928] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.660100] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.660263] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.660421] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.660577] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.660732] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.660890] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.661069] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.661230] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.661400] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.661564] env[68285]: DEBUG oslo_service.backend.eventlet.service [None 
req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.661724] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.661882] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.662060] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.662225] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.662385] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.662551] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.662710] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.662867] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.663043] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.663214] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.663379] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.663539] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=68285) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.663699] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.663891] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.664080] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.664249] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.664410] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.664570] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.ssl = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.664737] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.664903] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.665077] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.665250] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.665417] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.ssl_version = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.665580] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.665768] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.665962] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_notifications.retry = -1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.666168] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.666344] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_messaging_notifications.transport_url = **** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.666517] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.auth_section = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.666682] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.auth_type = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.666847] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.cafile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.667013] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.certfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.667219] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.collect_timing = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.667388] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.connect_retries = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.667547] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.connect_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.667706] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.endpoint_id = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.667876] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.endpoint_interface = publicURL {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.668048] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.endpoint_override = 
None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.668211] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.endpoint_region_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.668370] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.endpoint_service_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.668526] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.endpoint_service_type = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.668688] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.insecure = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.668844] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.keyfile = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.669008] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.max_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.669175] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.min_version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.669331] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.region_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.669489] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.retriable_status_codes = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.669645] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.service_name = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.669800] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.service_type = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.669960] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.split_loggers = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.670132] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.status_code_retries = None {{(pid=68285) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.670317] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.status_code_retry_delay = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.670483] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.timeout = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.670640] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.valid_interfaces = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.670795] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_limit.version = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.670958] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_reports.file_event_handler = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.671138] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.671299] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] oslo_reports.log_dir = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.671471] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.671629] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.671789] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.671955] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.672135] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.672294] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.672462] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.672620] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vif_plug_ovs_privileged.group = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.672774] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.672937] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.673110] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.673270] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] vif_plug_ovs_privileged.user = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.673440] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_linux_bridge.flat_interface = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.673616] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.673807] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.673995] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.674185] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.674358] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.674525] env[68285]: DEBUG oslo_service.backend.eventlet.service [None 
req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.674688] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.674866] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.675050] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_ovs.isolate_vif = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.675222] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.675391] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.675561] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.675734] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_ovs.ovsdb_interface = native {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.675938] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] os_vif_ovs.per_port_bridge = False {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.676131] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] privsep_osbrick.capabilities = [21] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.676295] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] privsep_osbrick.group = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.676452] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] privsep_osbrick.helper_command = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.676619] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.676780] env[68285]: DEBUG oslo_service.backend.eventlet.service [None 
req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.676957] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] privsep_osbrick.user = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.677127] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.677286] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] nova_sys_admin.group = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.677441] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] nova_sys_admin.helper_command = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.677604] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.677763] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.677920] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] nova_sys_admin.user = None {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 691.678064] env[68285]: DEBUG oslo_service.backend.eventlet.service [None req-8025b404-b3ab-43e5-952b-92a33d9da712 None None] ******************************************************************************** {{(pid=68285) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 691.678490] env[68285]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 692.182144] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Getting list of instances from cluster (obj){ [ 692.182144] env[68285]: value = "domain-c8" [ 692.182144] env[68285]: _type = "ClusterComputeResource" [ 692.182144] env[68285]: } {{(pid=68285) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 692.183219] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d8b39b-7932-4bd6-99f6-2febde7ea4a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.192123] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Got total of 0 instances {{(pid=68285) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 692.192680] env[68285]: WARNING nova.virt.vmwareapi.driver [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 692.193158] env[68285]: INFO nova.virt.node [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Generated node identity 7bdf675d-15ae-4a4b-9c03-79d8c773b76b [ 692.193388] env[68285]: INFO nova.virt.node [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Wrote node identity 7bdf675d-15ae-4a4b-9c03-79d8c773b76b to /opt/stack/data/n-cpu-1/compute_id [ 692.696219] env[68285]: WARNING nova.compute.manager [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Compute nodes ['7bdf675d-15ae-4a4b-9c03-79d8c773b76b'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 693.701068] env[68285]: INFO nova.compute.manager [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 694.706852] env[68285]: WARNING nova.compute.manager [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 694.707268] env[68285]: DEBUG oslo_concurrency.lockutils [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.707354] env[68285]: DEBUG oslo_concurrency.lockutils [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.707502] env[68285]: DEBUG oslo_concurrency.lockutils [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.707668] env[68285]: DEBUG nova.compute.resource_tracker [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68285) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 694.708584] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af18059d-c1b7-4296-91df-a99b1bac5fd2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.717027] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fa9b35-81a1-417d-886c-b739069b2825 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.731139] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1def2781-03f2-4347-bd01-fc9ef438c3b3 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.737451] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1641dd6-53b6-49a5-8bb7-ef452a69716d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.765588] env[68285]: DEBUG nova.compute.resource_tracker [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181128MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=68285) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 694.765732] env[68285]: DEBUG oslo_concurrency.lockutils [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.765959] env[68285]: DEBUG oslo_concurrency.lockutils [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.269746] env[68285]: WARNING nova.compute.resource_tracker [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] No compute node record for cpu-1:7bdf675d-15ae-4a4b-9c03-79d8c773b76b: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 7bdf675d-15ae-4a4b-9c03-79d8c773b76b could not be found. [ 695.773439] env[68285]: INFO nova.compute.resource_tracker [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b [ 697.282144] env[68285]: DEBUG nova.compute.resource_tracker [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 697.282506] env[68285]: DEBUG nova.compute.resource_tracker [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 697.454939] env[68285]: INFO nova.scheduler.client.report [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] [req-58f2a3f6-6590-4e3a-811d-bcaea6acd434] Created resource provider record via placement API for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 697.472170] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d285e8c-04e2-4bd2-bb17-2ab633083291 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.479639] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3632d33e-190b-4aec-a6bd-d2cdbfe361bb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.509083] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0660e7c-b034-489c-82d2-d348d8d84a40 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.515615] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d43f35c-3a7e-48b9-98a3-d6a0f908262a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.528167] env[68285]: DEBUG nova.compute.provider_tree [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 698.061978] env[68285]: DEBUG nova.scheduler.client.report [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 698.062314] env[68285]: DEBUG nova.compute.provider_tree [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 0 to 1 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 698.062549] env[68285]: DEBUG nova.compute.provider_tree [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 698.113536] env[68285]: DEBUG nova.compute.provider_tree [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Updating 
resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 1 to 2 during operation: update_traits {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 698.618127] env[68285]: DEBUG nova.compute.resource_tracker [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 698.618500] env[68285]: DEBUG oslo_concurrency.lockutils [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.852s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.618500] env[68285]: DEBUG nova.service [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Creating RPC server for service compute {{(pid=68285) start /opt/stack/nova/nova/service.py:186}} [ 698.631619] env[68285]: DEBUG nova.service [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] Join ServiceGroup membership for this service compute {{(pid=68285) start /opt/stack/nova/nova/service.py:203}} [ 698.631785] env[68285]: DEBUG nova.servicegroup.drivers.db [None req-03a1dc76-c2d6-48a1-8c13-ad0e028141f2 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68285) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 730.636081] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.139130] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Getting list of instances from cluster (obj){ [ 731.139130] env[68285]: value = "domain-c8" [ 731.139130] env[68285]: _type = "ClusterComputeResource" [ 731.139130] env[68285]: } {{(pid=68285) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 731.140344] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5728b3-d6b0-4c95-8989-0b0ef3377a68 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.149303] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Got total of 0 instances {{(pid=68285) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 731.149539] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.149847] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Getting list of instances from cluster (obj){ [ 731.149847] env[68285]: value = "domain-c8" [ 731.149847] env[68285]: _type = "ClusterComputeResource" [ 731.149847] env[68285]: } {{(pid=68285) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 731.151298] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2007db-acb4-411b-96a1-c65ec9716541 
{{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.159140] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Got total of 0 instances {{(pid=68285) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 738.067695] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Acquiring lock "e28d0927-17c2-4256-93d4-ef0cc2c9b92a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.067963] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Lock "e28d0927-17c2-4256-93d4-ef0cc2c9b92a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.432072] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Acquiring lock "ef0636f4-3149-44e8-a4a3-62b9ede5dc28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.432072] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lock "ef0636f4-3149-44e8-a4a3-62b9ede5dc28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.574035] env[68285]: DEBUG nova.compute.manager [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 738.937652] env[68285]: DEBUG nova.compute.manager [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 739.125851] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.127776] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.132457] env[68285]: INFO nova.compute.claims [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 739.474919] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.208179] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdf1887-6b0d-4ace-b621-b35d37c5ec0d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.220106] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622e64b5-ab3a-44f9-9720-67c8d9e69dd0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.253547] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381e43ef-4269-4c96-b835-6247e02279ea {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.263182] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ee6325-5b77-4d44-89a6-1d63f4d41211 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.281179] env[68285]: DEBUG nova.compute.provider_tree [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.696513] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquiring lock "105f0ad6-1591-40b9-997c-280860bd6501" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.696790] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lock "105f0ad6-1591-40b9-997c-280860bd6501" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.785283] env[68285]: DEBUG nova.scheduler.client.report [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 740.851401] env[68285]: DEBUG oslo_concurrency.lockutils [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Acquiring lock "9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.851630] env[68285]: DEBUG oslo_concurrency.lockutils [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Lock "9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.053469] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Acquiring lock "682c3b6e-a605-486a-86c8-af173d80cbcf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.054115] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Lock "682c3b6e-a605-486a-86c8-af173d80cbcf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.198953] env[68285]: DEBUG nova.compute.manager [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 741.290829] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.165s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.291404] env[68285]: DEBUG nova.compute.manager [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 741.295233] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.820s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.296666] env[68285]: INFO nova.compute.claims [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 741.360292] env[68285]: DEBUG nova.compute.manager [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 741.558035] env[68285]: DEBUG nova.compute.manager [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 741.647436] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Acquiring lock "f26a5b02-c71f-4f04-a8b2-4e284a6e37a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.647436] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Lock "f26a5b02-c71f-4f04-a8b2-4e284a6e37a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.675254] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "a97df3d2-c182-46d8-95c2-61caccade285" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.675688] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "a97df3d2-c182-46d8-95c2-61caccade285" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.739452] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.807132] env[68285]: DEBUG nova.compute.utils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 741.809655] env[68285]: DEBUG nova.compute.manager [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 741.809947] env[68285]: DEBUG nova.network.neutron [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 741.886190] env[68285]: DEBUG oslo_concurrency.lockutils [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.081424] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.150797] env[68285]: DEBUG nova.compute.manager [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 742.180131] env[68285]: DEBUG nova.compute.manager [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 742.310305] env[68285]: DEBUG nova.policy [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3d35b21dca243edbc360ecbde764e3c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dd008f6669ed4e65919a8125d2ba8d2e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 742.315771] env[68285]: DEBUG nova.compute.manager [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Start building block device mappings for instance. 
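The "Policy check for network:attach_external_network failed" entry above is an oslo.policy decision made from the request credentials: a member/reader token does not satisfy the admin-oriented rule, so external networks are simply excluded from the allocation rather than the build failing. A rough sketch of such a check with oslo.policy (the registered default shown here is an assumption for illustration, not Nova's actual default rule):

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    # Assumed admin-only rule, purely for illustration.
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['member', 'reader'],
             'project_id': 'dd008f6669ed4e65919a8125d2ba8d2e'}
    # Returns False for a member/reader token, matching the log line.
    print(enforcer.enforce('network:attach_external_network', {}, creds))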
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 742.499874] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5531e46a-2916-48f7-8c2a-bda8ccb21d7b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.510855] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3743930d-984f-47b3-92f0-aedf112235f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.549491] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f188b9ce-a05a-4fa2-83e2-2699a7f3a4b7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.559139] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95a1917-daf7-4e14-932b-ee9994d89577 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.575292] env[68285]: DEBUG nova.compute.provider_tree [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.684065] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.709777] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.882358] env[68285]: DEBUG nova.network.neutron [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Successfully created port: 56efc1e7-b396-4ba4-8104-803f5f018f35 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 743.080295] env[68285]: DEBUG nova.scheduler.client.report [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 743.339892] env[68285]: 
DEBUG nova.compute.manager [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 743.373110] env[68285]: DEBUG nova.virt.hardware [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 743.373110] env[68285]: DEBUG nova.virt.hardware [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 743.373110] env[68285]: DEBUG nova.virt.hardware [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 743.373521] env[68285]: DEBUG nova.virt.hardware [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 743.373521] env[68285]: DEBUG nova.virt.hardware [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 743.373521] env[68285]: DEBUG nova.virt.hardware [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 743.373521] env[68285]: DEBUG nova.virt.hardware [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 743.373521] env[68285]: 
DEBUG nova.virt.hardware [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 743.374214] env[68285]: DEBUG nova.virt.hardware [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 743.374214] env[68285]: DEBUG nova.virt.hardware [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 743.374325] env[68285]: DEBUG nova.virt.hardware [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 743.375240] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d822bd0-4491-4839-8fae-b588dd282608 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.386587] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d382187f-dba3-439a-954a-8b2101d996b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.409074] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75915405-496b-4a60-ab21-bf2d06ef434e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.588177] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.293s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.588823] env[68285]: DEBUG nova.compute.manager [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Start building networks asynchronously for instance. 
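The nova.virt.hardware lines above trace the CPU topology negotiation for the m1.nano flavor: with no flavor or image preference (0:0:0) the limits default to 65536 sockets/cores/threads, and for a single vCPU the only combination whose product is 1 is one socket, one core, one thread. A simplified sketch of that enumeration (a toy version of the selection the log describes, not Nova's actual implementation, which also weighs preferences and NUMA constraints):

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Every (sockets, cores, threads) combination whose product equals the
        # vCPU count and stays within the per-dimension limits.
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield VirtCPUTopology(s, c, t)

    print(list(possible_topologies(1)))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- "Got 1 possible topologies"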
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 743.591492] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.852s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.594248] env[68285]: INFO nova.compute.claims [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.098086] env[68285]: DEBUG nova.compute.utils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 744.105429] env[68285]: DEBUG nova.compute.manager [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 744.105546] env[68285]: DEBUG nova.network.neutron [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 744.216129] env[68285]: DEBUG nova.policy [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c52d82180944fdc960ee3c4d5597c94', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5f3de96c4b804dc5a95aa833fd4f8bef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 744.606418] env[68285]: DEBUG nova.compute.manager [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 744.738173] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d067f20c-12bb-4027-a8fa-b1d1bac111ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.745839] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50de0e01-191a-4cfd-a185-6ed224e64012 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.780048] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc16a34-6eee-40f8-a011-ef7ad98e9e42 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.788921] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bcd3527-67e3-4c67-aa3f-b120db5c0e03 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.804330] env[68285]: DEBUG nova.compute.provider_tree [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.308140] env[68285]: DEBUG nova.scheduler.client.report [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 745.410341] env[68285]: DEBUG nova.network.neutron [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Successfully created port: c1fb0925-6895-4803-ab32-896f8eb94202 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 745.587123] env[68285]: DEBUG nova.network.neutron [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Successfully updated port: 56efc1e7-b396-4ba4-8104-803f5f018f35 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 745.626433] env[68285]: DEBUG nova.compute.manager [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Start spawning the instance on the hypervisor. 
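The inventory dictionary that the report client keeps confirming as unchanged is the resource provider's view in Placement. The capacity the scheduler can actually allocate from each resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A small worked check of the numbers in the log (plain arithmetic, no Placement API involved):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 163,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        schedulable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable={schedulable:.0f}, per-allocation cap={inv['max_unit']}")
    # VCPU: schedulable=192, per-allocation cap=16
    # MEMORY_MB: schedulable=196078, per-allocation cap=65530
    # DISK_GB: schedulable=400, per-allocation cap=163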
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 745.666743] env[68285]: DEBUG nova.virt.hardware [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 745.669861] env[68285]: DEBUG nova.virt.hardware [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.673290] env[68285]: DEBUG nova.virt.hardware [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 745.673290] env[68285]: DEBUG nova.virt.hardware [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.673290] env[68285]: DEBUG nova.virt.hardware [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 745.673290] env[68285]: DEBUG nova.virt.hardware [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 745.673290] env[68285]: DEBUG nova.virt.hardware [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 745.673444] env[68285]: DEBUG nova.virt.hardware [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 745.673444] env[68285]: DEBUG nova.virt.hardware [None 
req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 745.673444] env[68285]: DEBUG nova.virt.hardware [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 745.673444] env[68285]: DEBUG nova.virt.hardware [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 745.673444] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3207a41d-6a77-4c8b-a02a-a17531a0b634 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.692015] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cccd223-ea36-4b6d-bcd8-062d6f9a049f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.728591] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.728591] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.819251] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.228s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.820254] env[68285]: DEBUG nova.compute.manager [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 745.826409] env[68285]: DEBUG oslo_concurrency.lockutils [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.936s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.828127] env[68285]: INFO nova.compute.claims [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 746.090421] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Acquiring lock "refresh_cache-e28d0927-17c2-4256-93d4-ef0cc2c9b92a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.090597] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Acquired lock "refresh_cache-e28d0927-17c2-4256-93d4-ef0cc2c9b92a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.091677] env[68285]: DEBUG nova.network.neutron [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 746.233308] env[68285]: DEBUG nova.compute.manager [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 746.342467] env[68285]: DEBUG nova.compute.utils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 746.342467] env[68285]: DEBUG nova.compute.manager [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 746.342467] env[68285]: DEBUG nova.network.neutron [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 746.522868] env[68285]: DEBUG nova.policy [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '255d34e6b0fc47c7be886b2311c74309', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74d2b141f0044c8985eae7c380a03466', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 746.763585] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.777375] env[68285]: DEBUG nova.network.neutron [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.845738] env[68285]: DEBUG nova.compute.manager [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Start building block device mappings for instance. 
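The allocate_for_instance() / "Successfully created port" sequence is Nova asking Neutron to create a port on the tenant network and bind it to the instance; the build then waits for the matching network-vif-plugged event before powering the VM on. Nova drives this through its internal Neutron client, but the equivalent call through openstacksdk looks roughly like this (the cloud name is a placeholder and the attribute values are copied from the log purely for illustration):

    import openstack

    # 'devstack' refers to a clouds.yaml entry; assumed for this sketch.
    conn = openstack.connect(cloud='devstack')

    port = conn.network.create_port(
        network_id='8cd87e17-9031-47ac-b8d9-60c9036412d9',  # tenant network id from the log
        device_id='e28d0927-17c2-4256-93d4-ef0cc2c9b92a',   # instance UUID from the log
        device_owner='compute:nova')
    print(port.id, port.mac_address)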
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 746.876023] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.876434] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.876696] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.876958] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.877205] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.877516] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.877693] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.878672] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] CONF.reclaim_instance_interval <= 0, skipping... 
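The burst of "Running periodic task ComputeManager._..." lines comes from oslo.service's periodic task machinery: methods on the manager decorated with @periodic_task.periodic_task are collected and run on their own intervals, and _reclaim_queued_deletes returns immediately because reclaim_instance_interval is not positive. A bare-bones sketch of that decorator pattern (task bodies and the interval values are illustrative):

    from oslo_config import cfg
    from oslo_service import periodic_task

    class ComputeLikeManager(periodic_task.PeriodicTasks):
        """Toy manager showing the decorator pattern behind the log lines."""

        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60, run_immediately=True)
        def _poll_volume_usage(self, context):
            print('Running periodic task _poll_volume_usage')

        @periodic_task.periodic_task(spacing=60, run_immediately=True)
        def _reclaim_queued_deletes(self, context):
            reclaim_interval = 0  # illustrative stand-in for CONF.reclaim_instance_interval
            if reclaim_interval <= 0:
                print('CONF.reclaim_instance_interval <= 0, skipping...')
                return

    mgr = ComputeLikeManager()
    mgr.run_periodic_tasks(context=None)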
{{(pid=68285) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 746.878849] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 747.008479] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758d7b5c-ebf7-4441-8866-c14259151a0a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.016340] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8ebe12-9d1b-440d-bb41-c1346f1f7967 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.051371] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ecd5bd-9199-4b69-ae4b-e3822a5dc8c1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.060164] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8ff4f8-0990-4c9c-a4e3-6c0eaa028433 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.074029] env[68285]: DEBUG nova.compute.provider_tree [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.237639] env[68285]: DEBUG nova.compute.manager [req-af7bdd0e-a9b1-4243-b5b9-e5497ba04aac req-9d2d6cfc-06e7-4e46-aebd-f0ebdd7d83d0 service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Received event network-vif-plugged-56efc1e7-b396-4ba4-8104-803f5f018f35 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 747.237844] env[68285]: DEBUG oslo_concurrency.lockutils [req-af7bdd0e-a9b1-4243-b5b9-e5497ba04aac req-9d2d6cfc-06e7-4e46-aebd-f0ebdd7d83d0 service nova] Acquiring lock "e28d0927-17c2-4256-93d4-ef0cc2c9b92a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.238455] env[68285]: DEBUG oslo_concurrency.lockutils [req-af7bdd0e-a9b1-4243-b5b9-e5497ba04aac req-9d2d6cfc-06e7-4e46-aebd-f0ebdd7d83d0 service nova] Lock "e28d0927-17c2-4256-93d4-ef0cc2c9b92a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.238644] env[68285]: DEBUG oslo_concurrency.lockutils [req-af7bdd0e-a9b1-4243-b5b9-e5497ba04aac req-9d2d6cfc-06e7-4e46-aebd-f0ebdd7d83d0 service nova] Lock "e28d0927-17c2-4256-93d4-ef0cc2c9b92a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.238790] env[68285]: DEBUG nova.compute.manager [req-af7bdd0e-a9b1-4243-b5b9-e5497ba04aac 
req-9d2d6cfc-06e7-4e46-aebd-f0ebdd7d83d0 service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] No waiting events found dispatching network-vif-plugged-56efc1e7-b396-4ba4-8104-803f5f018f35 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 747.238955] env[68285]: WARNING nova.compute.manager [req-af7bdd0e-a9b1-4243-b5b9-e5497ba04aac req-9d2d6cfc-06e7-4e46-aebd-f0ebdd7d83d0 service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Received unexpected event network-vif-plugged-56efc1e7-b396-4ba4-8104-803f5f018f35 for instance with vm_state building and task_state spawning. [ 747.302669] env[68285]: DEBUG nova.network.neutron [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Updating instance_info_cache with network_info: [{"id": "56efc1e7-b396-4ba4-8104-803f5f018f35", "address": "fa:16:3e:b6:95:74", "network": {"id": "8cd87e17-9031-47ac-b8d9-60c9036412d9", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-424993255-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd008f6669ed4e65919a8125d2ba8d2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56efc1e7-b3", "ovs_interfaceid": "56efc1e7-b396-4ba4-8104-803f5f018f35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.387767] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.577131] env[68285]: DEBUG nova.scheduler.client.report [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 747.716641] env[68285]: DEBUG nova.network.neutron [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 
105f0ad6-1591-40b9-997c-280860bd6501] Successfully created port: b3b48e73-c170-4669-888a-5f674831a535 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.731552] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "fe9a8a13-73ec-4556-a62c-cc49fd01f539" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.735024] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "fe9a8a13-73ec-4556-a62c-cc49fd01f539" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.808781] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Releasing lock "refresh_cache-e28d0927-17c2-4256-93d4-ef0cc2c9b92a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.810292] env[68285]: DEBUG nova.compute.manager [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Instance network_info: |[{"id": "56efc1e7-b396-4ba4-8104-803f5f018f35", "address": "fa:16:3e:b6:95:74", "network": {"id": "8cd87e17-9031-47ac-b8d9-60c9036412d9", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-424993255-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd008f6669ed4e65919a8125d2ba8d2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56efc1e7-b3", "ovs_interfaceid": "56efc1e7-b396-4ba4-8104-803f5f018f35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 747.810430] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:95:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a0d5af-5be9-477a-837c-58ef55c717f4', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56efc1e7-b396-4ba4-8104-803f5f018f35', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 747.832534] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.833279] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f2d99b0-ec37-47d2-a964-e52c1f64932e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.852030] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Created folder: OpenStack in parent group-v4. [ 747.852030] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Creating folder: Project (dd008f6669ed4e65919a8125d2ba8d2e). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.852030] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5921b61b-6ab2-457e-b8d1-3367ed8298f3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.859535] env[68285]: DEBUG nova.compute.manager [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 747.864473] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Created folder: Project (dd008f6669ed4e65919a8125d2ba8d2e) in parent group-v580775. [ 747.864473] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Creating folder: Instances. Parent ref: group-v580776. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.864604] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1211a230-92b3-42b9-acb7-554af7a37590 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.874246] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Created folder: Instances in parent group-v580776. 
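The Folder.CreateFolder and (below) Folder.CreateVM_Task invocations are all issued through the oslo.vmware session: the driver calls invoke_api against the vim client, receives a task reference, and blocks in wait_for_task while the API layer polls progress, which is what the later "Task: {'id': task-2890986, ...} progress is 0%" entries show. A condensed sketch of that call pattern (host, credentials, parent folder and the empty config spec are placeholders; real values come from nova.conf and the driver's spec builders):

    from oslo_vmware import api as vmware_api

    # Placeholder connection details; constructing the session logs into vCenter.
    session = vmware_api.VMwareAPISession(
        'vc1.example.test', 'user', 'secret',
        api_retry_count=3, task_poll_interval=0.5)

    root_folder = session.vim.service_content.rootFolder
    # Folder.CreateFolder returns the new folder's managed object reference.
    project_folder = session.invoke_api(session.vim, 'CreateFolder',
                                        root_folder, name='OpenStack')

    # CreateVM_Task returns a task ref; wait_for_task polls it to completion,
    # mirroring the "progress is 0%" / "completed successfully" lines.
    config_spec = None  # a populated VirtualMachineConfigSpec in real code
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task', project_folder,
                                  config=config_spec, pool=None)
    task_info = session.wait_for_task(task_ref)
    print(task_info.state)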
[ 747.874246] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 747.874246] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 747.874246] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1050322b-fb78-42b4-8796-13b5e70aa539 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.900406] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 747.900406] env[68285]: value = "task-2890986" [ 747.900406] env[68285]: _type = "Task" [ 747.900406] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.904019] env[68285]: DEBUG nova.virt.hardware [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 747.904019] env[68285]: DEBUG nova.virt.hardware [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 747.904019] env[68285]: DEBUG nova.virt.hardware [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 747.904473] env[68285]: DEBUG nova.virt.hardware [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 747.904473] env[68285]: DEBUG nova.virt.hardware [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 747.904473] 
env[68285]: DEBUG nova.virt.hardware [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 747.904473] env[68285]: DEBUG nova.virt.hardware [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 747.904473] env[68285]: DEBUG nova.virt.hardware [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 747.904628] env[68285]: DEBUG nova.virt.hardware [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 747.904628] env[68285]: DEBUG nova.virt.hardware [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 747.904628] env[68285]: DEBUG nova.virt.hardware [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 747.905154] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e217a5-fac2-4950-a698-1c804b85ac71 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.920749] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d68250-439b-4c8b-b8b9-324e0c9ffc3f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.932277] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2890986, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.013809] env[68285]: DEBUG nova.network.neutron [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Successfully updated port: c1fb0925-6895-4803-ab32-896f8eb94202 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 748.085444] env[68285]: DEBUG oslo_concurrency.lockutils [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.263s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.086335] env[68285]: DEBUG nova.compute.manager [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 748.089104] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.008s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.090789] env[68285]: INFO nova.compute.claims [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 748.235452] env[68285]: DEBUG nova.compute.manager [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 748.419280] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2890986, 'name': CreateVM_Task, 'duration_secs': 0.496562} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.419280] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 748.429447] env[68285]: DEBUG oslo_vmware.service [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6369b2e2-5f0d-4c78-b8d9-9a782e090124 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.437092] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.437092] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.437092] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 748.437586] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ddff9a6-1b11-4df8-a0c4-3cf9c3ba387e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.443157] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for the task: (returnval){ [ 748.443157] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5230094f-d4bb-5236-b8ac-df93e4f08a5d" [ 748.443157] env[68285]: _type = "Task" [ 748.443157] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.453612] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5230094f-d4bb-5236-b8ac-df93e4f08a5d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.518232] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Acquiring lock "refresh_cache-ef0636f4-3149-44e8-a4a3-62b9ede5dc28" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.518232] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Acquired lock "refresh_cache-ef0636f4-3149-44e8-a4a3-62b9ede5dc28" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.518455] env[68285]: DEBUG nova.network.neutron [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.596491] env[68285]: DEBUG nova.compute.utils [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 748.606216] env[68285]: DEBUG nova.compute.manager [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Not allocating networking since 'none' was specified. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 748.761589] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.957433] env[68285]: DEBUG nova.compute.manager [req-eb2c7344-a369-4cd6-a73f-1be22c9ad77b req-a3a9d352-576e-4b81-a409-5a7a416a13e1 service nova] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Received event network-vif-plugged-c1fb0925-6895-4803-ab32-896f8eb94202 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 748.957728] env[68285]: DEBUG oslo_concurrency.lockutils [req-eb2c7344-a369-4cd6-a73f-1be22c9ad77b req-a3a9d352-576e-4b81-a409-5a7a416a13e1 service nova] Acquiring lock "ef0636f4-3149-44e8-a4a3-62b9ede5dc28-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.958131] env[68285]: DEBUG oslo_concurrency.lockutils [req-eb2c7344-a369-4cd6-a73f-1be22c9ad77b req-a3a9d352-576e-4b81-a409-5a7a416a13e1 service nova] Lock "ef0636f4-3149-44e8-a4a3-62b9ede5dc28-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.958131] env[68285]: DEBUG oslo_concurrency.lockutils [req-eb2c7344-a369-4cd6-a73f-1be22c9ad77b req-a3a9d352-576e-4b81-a409-5a7a416a13e1 service nova] Lock "ef0636f4-3149-44e8-a4a3-62b9ede5dc28-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.958215] env[68285]: DEBUG nova.compute.manager [req-eb2c7344-a369-4cd6-a73f-1be22c9ad77b req-a3a9d352-576e-4b81-a409-5a7a416a13e1 service nova] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] No waiting events found dispatching network-vif-plugged-c1fb0925-6895-4803-ab32-896f8eb94202 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 748.958375] env[68285]: WARNING nova.compute.manager [req-eb2c7344-a369-4cd6-a73f-1be22c9ad77b req-a3a9d352-576e-4b81-a409-5a7a416a13e1 service nova] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Received unexpected event network-vif-plugged-c1fb0925-6895-4803-ab32-896f8eb94202 for instance with vm_state building and task_state spawning. 
[ 748.966960] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.967259] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 748.967508] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.967756] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.968115] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.968890] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1092953-f1df-441a-a893-843232999fd1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.987257] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.987483] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 748.989497] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6852ec98-7b6b-4dad-a046-a7e6753bce65 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.997216] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-caa40aac-0c0c-49a2-ba8c-7a308c53a8bf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.002931] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for the task: (returnval){ [ 749.002931] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5286be11-3082-918b-f8ba-f138b664fb7e" [ 749.002931] env[68285]: _type = "Task" [ 749.002931] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.010823] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5286be11-3082-918b-f8ba-f138b664fb7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.057573] env[68285]: DEBUG nova.network.neutron [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.106886] env[68285]: DEBUG nova.compute.manager [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 749.267781] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86644a4e-03d2-4924-b95b-d02eb5efece7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.276715] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d04e04-4da3-4c17-a219-3cedb09e897b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.308052] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9b20cf-aa08-4615-bb77-8270bac803fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.315625] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a8da0f-d266-4a23-9e5c-614863e9962c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.330959] env[68285]: DEBUG nova.compute.provider_tree [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.341773] env[68285]: DEBUG nova.network.neutron [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Updating instance_info_cache with network_info: [{"id": "c1fb0925-6895-4803-ab32-896f8eb94202", "address": "fa:16:3e:32:6c:2b", "network": {"id": "c1e4f9b4-c2ac-4fdc-ba8f-0c8fa734758f", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-195692460-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f3de96c4b804dc5a95aa833fd4f8bef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1fb0925-68", "ovs_interfaceid": "c1fb0925-6895-4803-ab32-896f8eb94202", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.516035] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Preparing fetch location {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 749.516035] env[68285]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Creating directory with path [datastore1] vmware_temp/16d90985-fe14-4773-8e30-542122dbb561/ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 749.516035] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95ce5f1e-fbcb-409f-9eb2-9703fd81c4aa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.535041] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Created directory with path [datastore1] vmware_temp/16d90985-fe14-4773-8e30-542122dbb561/ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 749.535132] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Fetch image to [datastore1] vmware_temp/16d90985-fe14-4773-8e30-542122dbb561/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 749.535273] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Downloading image file data ce84ab4c-9913-42dc-b839-714ad2184867 to [datastore1] vmware_temp/16d90985-fe14-4773-8e30-542122dbb561/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk on the data store datastore1 {{(pid=68285) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 749.536106] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc36fa4-61a5-4cde-b004-c34fd3771d24 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.544693] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c0ce02-cbf9-4fc8-961c-3c1ef4e6a8a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.559902] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3aa39f-cc86-4a5c-b535-0e5f8759541a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.591832] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f333300-d70a-4aa4-be88-dd3fda9fe890 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.598784] env[68285]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b06cad1b-0a38-41f9-87a6-ee09f5809363 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.694544] env[68285]: DEBUG nova.virt.vmwareapi.images [None 
req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Downloading image file data ce84ab4c-9913-42dc-b839-714ad2184867 to the data store datastore1 {{(pid=68285) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 749.816033] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/16d90985-fe14-4773-8e30-542122dbb561/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68285) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 749.897362] env[68285]: DEBUG nova.scheduler.client.report [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 749.903405] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Releasing lock "refresh_cache-ef0636f4-3149-44e8-a4a3-62b9ede5dc28" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.903405] env[68285]: DEBUG nova.compute.manager [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Instance network_info: |[{"id": "c1fb0925-6895-4803-ab32-896f8eb94202", "address": "fa:16:3e:32:6c:2b", "network": {"id": "c1e4f9b4-c2ac-4fdc-ba8f-0c8fa734758f", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-195692460-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f3de96c4b804dc5a95aa833fd4f8bef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1fb0925-68", "ovs_interfaceid": "c1fb0925-6895-4803-ab32-896f8eb94202", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 749.905935] env[68285]: DEBUG nova.network.neutron [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Successfully updated port: b3b48e73-c170-4669-888a-5f674831a535 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 749.908873] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:6c:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55764410-260e-4339-a020-6b30995584bf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1fb0925-6895-4803-ab32-896f8eb94202', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 749.924970] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Creating folder: Project (5f3de96c4b804dc5a95aa833fd4f8bef). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 749.930172] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.841s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.930815] env[68285]: DEBUG nova.compute.manager [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 749.933744] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c3060b4-d849-47a4-b8cc-15eff74594f0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.936119] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.253s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.937789] env[68285]: INFO nova.compute.claims [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 749.951358] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Created folder: Project (5f3de96c4b804dc5a95aa833fd4f8bef) in parent group-v580775. [ 749.952354] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Creating folder: Instances. Parent ref: group-v580779. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 749.952354] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-feac30b2-3466-4dfc-9831-66bffb45c949 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.970018] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Created folder: Instances in parent group-v580779. [ 749.970018] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 749.971227] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 749.971716] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2d431fa-2d0c-44c0-b448-22cef0de8985 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.002905] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.002905] env[68285]: value = "task-2890989" [ 750.002905] env[68285]: _type = "Task" [ 750.002905] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.014035] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2890989, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.118975] env[68285]: DEBUG nova.compute.manager [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 750.156462] env[68285]: DEBUG nova.virt.hardware [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 750.156811] env[68285]: DEBUG nova.virt.hardware [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 750.157450] env[68285]: DEBUG nova.virt.hardware [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 750.157712] env[68285]: DEBUG nova.virt.hardware [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 750.157834] env[68285]: DEBUG nova.virt.hardware [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 750.157971] env[68285]: DEBUG nova.virt.hardware [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 750.158388] env[68285]: DEBUG nova.virt.hardware [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 750.158603] env[68285]: DEBUG nova.virt.hardware [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 750.158816] env[68285]: DEBUG nova.virt.hardware [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 750.159011] env[68285]: DEBUG nova.virt.hardware [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 750.159187] env[68285]: DEBUG nova.virt.hardware [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 750.160324] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4fc89f-7e50-4114-a6bc-7fed1ab8ec33 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.177153] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e61cc09-7c65-4df0-a2f3-1dbfe4c07179 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.207520] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 750.213580] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Creating folder: Project (9f7e51c50ab84592acece9636100ef45). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 750.218134] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10955ff3-8914-4eab-a3b7-18eabb585f99 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.229496] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Created folder: Project (9f7e51c50ab84592acece9636100ef45) in parent group-v580775. [ 750.229701] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Creating folder: Instances. Parent ref: group-v580782. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 750.230153] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c63748b2-a3a6-4850-b3fe-c0161bfffdc3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.239732] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Created folder: Instances in parent group-v580782. [ 750.240016] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 750.240215] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 750.240439] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76d8b7e4-5153-4977-8441-3d57cc2e0e02 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.277912] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.277912] env[68285]: value = "task-2890992" [ 750.277912] env[68285]: _type = "Task" [ 750.277912] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.291173] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2890992, 'name': CreateVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.343520] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "11de7da5-1d73-4536-b2a1-f7dbbdec14b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.343824] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "11de7da5-1d73-4536-b2a1-f7dbbdec14b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.368968] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "ec89a2a4-3bfc-45c5-b7f2-239b52995d6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.368968] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "ec89a2a4-3bfc-45c5-b7f2-239b52995d6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.409604] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquiring lock "refresh_cache-105f0ad6-1591-40b9-997c-280860bd6501" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.409604] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquired lock "refresh_cache-105f0ad6-1591-40b9-997c-280860bd6501" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.409604] env[68285]: DEBUG nova.network.neutron [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 750.442619] env[68285]: DEBUG nova.compute.utils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 750.449111] env[68285]: DEBUG nova.compute.manager [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 
tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 750.449338] env[68285]: DEBUG nova.network.neutron [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 750.518990] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2890989, 'name': CreateVM_Task, 'duration_secs': 0.415704} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.523355] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 750.523355] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.523355] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.523576] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 750.524021] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28358c02-0345-4a64-aede-78d9f218101f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.529922] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 750.529922] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527060ff-5d9a-b092-8c75-2dba51d5cf76" [ 750.529922] env[68285]: _type = "Task" [ 750.529922] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.539868] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Completed reading data from the image iterator. 
{{(pid=68285) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 750.540130] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/16d90985-fe14-4773-8e30-542122dbb561/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 750.548852] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.549208] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 750.549445] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.691987] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Downloaded image file data ce84ab4c-9913-42dc-b839-714ad2184867 to vmware_temp/16d90985-fe14-4773-8e30-542122dbb561/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk on the data store datastore1 {{(pid=68285) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 750.695309] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Caching image {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 750.695566] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Copying Virtual Disk [datastore1] vmware_temp/16d90985-fe14-4773-8e30-542122dbb561/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk to [datastore1] vmware_temp/16d90985-fe14-4773-8e30-542122dbb561/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 750.695890] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e627523-77b5-4596-8341-436163211a16 {{(pid=68285) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.707247] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for the task: (returnval){ [ 750.707247] env[68285]: value = "task-2890993" [ 750.707247] env[68285]: _type = "Task" [ 750.707247] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.720417] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2890993, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.759500] env[68285]: DEBUG nova.policy [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'acbce6d266aa4971b4bd3cd06e535fc0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd8822845305049a2ba69dba0963753f0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 750.773812] env[68285]: DEBUG nova.compute.manager [req-f4135320-9354-40f1-978a-1a85c5b11d1d req-6632818e-767f-4d40-81c6-ad90e6102a43 service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Received event network-changed-56efc1e7-b396-4ba4-8104-803f5f018f35 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 750.773993] env[68285]: DEBUG nova.compute.manager [req-f4135320-9354-40f1-978a-1a85c5b11d1d req-6632818e-767f-4d40-81c6-ad90e6102a43 service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Refreshing instance network info cache due to event network-changed-56efc1e7-b396-4ba4-8104-803f5f018f35. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 750.774214] env[68285]: DEBUG oslo_concurrency.lockutils [req-f4135320-9354-40f1-978a-1a85c5b11d1d req-6632818e-767f-4d40-81c6-ad90e6102a43 service nova] Acquiring lock "refresh_cache-e28d0927-17c2-4256-93d4-ef0cc2c9b92a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.774383] env[68285]: DEBUG oslo_concurrency.lockutils [req-f4135320-9354-40f1-978a-1a85c5b11d1d req-6632818e-767f-4d40-81c6-ad90e6102a43 service nova] Acquired lock "refresh_cache-e28d0927-17c2-4256-93d4-ef0cc2c9b92a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.774536] env[68285]: DEBUG nova.network.neutron [req-f4135320-9354-40f1-978a-1a85c5b11d1d req-6632818e-767f-4d40-81c6-ad90e6102a43 service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Refreshing network info cache for port 56efc1e7-b396-4ba4-8104-803f5f018f35 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 750.790686] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2890992, 'name': CreateVM_Task, 'duration_secs': 0.311737} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.790852] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 750.791273] env[68285]: DEBUG oslo_concurrency.lockutils [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.791436] env[68285]: DEBUG oslo_concurrency.lockutils [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.791737] env[68285]: DEBUG oslo_concurrency.lockutils [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 750.792419] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bee95e3c-63d2-401f-aaa2-cc14e04ab38e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.797216] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Waiting for the task: (returnval){ [ 750.797216] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ba3e34-3397-afb5-d29c-d9413d738ff4" [ 750.797216] env[68285]: _type = "Task" [ 750.797216] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.806149] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ba3e34-3397-afb5-d29c-d9413d738ff4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.846041] env[68285]: DEBUG nova.compute.manager [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 750.958120] env[68285]: DEBUG nova.compute.manager [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 751.010465] env[68285]: DEBUG nova.network.neutron [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.181285] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb6f4521-080d-441e-b72f-cb264a40167b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.191375] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b0ef96-5e0d-4109-ab54-8531bbf69109 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.240182] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60aad2a-0c92-45aa-9afa-fd7bddb8bb79 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.251103] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2890993, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.253011] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d179d5-6dee-445c-9954-3bd3806088f1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.269622] env[68285]: DEBUG nova.compute.provider_tree [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.312276] env[68285]: DEBUG oslo_concurrency.lockutils [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.312535] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 751.312769] env[68285]: DEBUG oslo_concurrency.lockutils [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.377091] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.520498] env[68285]: DEBUG nova.network.neutron [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Updating instance_info_cache with network_info: [{"id": "b3b48e73-c170-4669-888a-5f674831a535", "address": "fa:16:3e:b9:79:a1", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": 
"nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b48e73-c1", "ovs_interfaceid": "b3b48e73-c170-4669-888a-5f674831a535", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.744446] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2890993, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.778434} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.744910] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Copied Virtual Disk [datastore1] vmware_temp/16d90985-fe14-4773-8e30-542122dbb561/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk to [datastore1] vmware_temp/16d90985-fe14-4773-8e30-542122dbb561/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 751.745330] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Deleting the datastore file [datastore1] vmware_temp/16d90985-fe14-4773-8e30-542122dbb561/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 751.745578] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82779ab8-0dad-46a3-aeae-9416d02c3347 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.749097] env[68285]: DEBUG nova.network.neutron [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Successfully created port: 3e9ecf12-c47b-42e6-8dcf-0963075951af {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 751.755922] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for the task: (returnval){ [ 751.755922] env[68285]: value = "task-2890994" [ 751.755922] env[68285]: _type = "Task" [ 751.755922] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.764227] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2890994, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.772257] env[68285]: DEBUG nova.scheduler.client.report [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.890715] env[68285]: DEBUG nova.network.neutron [req-f4135320-9354-40f1-978a-1a85c5b11d1d req-6632818e-767f-4d40-81c6-ad90e6102a43 service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Updated VIF entry in instance network info cache for port 56efc1e7-b396-4ba4-8104-803f5f018f35. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 751.890715] env[68285]: DEBUG nova.network.neutron [req-f4135320-9354-40f1-978a-1a85c5b11d1d req-6632818e-767f-4d40-81c6-ad90e6102a43 service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Updating instance_info_cache with network_info: [{"id": "56efc1e7-b396-4ba4-8104-803f5f018f35", "address": "fa:16:3e:b6:95:74", "network": {"id": "8cd87e17-9031-47ac-b8d9-60c9036412d9", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-424993255-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd008f6669ed4e65919a8125d2ba8d2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56efc1e7-b3", "ovs_interfaceid": "56efc1e7-b396-4ba4-8104-803f5f018f35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.968553] env[68285]: DEBUG nova.compute.manager [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 752.004670] env[68285]: DEBUG nova.virt.hardware [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 752.004670] env[68285]: DEBUG nova.virt.hardware [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 752.004670] env[68285]: DEBUG nova.virt.hardware [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 752.004832] env[68285]: DEBUG nova.virt.hardware [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 752.004832] env[68285]: DEBUG nova.virt.hardware [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 752.004832] env[68285]: DEBUG nova.virt.hardware [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 752.004832] env[68285]: DEBUG nova.virt.hardware [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 752.004832] env[68285]: DEBUG nova.virt.hardware [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 752.004994] env[68285]: DEBUG nova.virt.hardware [None 
req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 752.005102] env[68285]: DEBUG nova.virt.hardware [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 752.005238] env[68285]: DEBUG nova.virt.hardware [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 752.006188] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912571f4-dd4a-4ad2-904a-beb56814f6b7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.014237] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1763e0-aa02-45bb-b4b6-35c8b4a97c85 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.038205] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Releasing lock "refresh_cache-105f0ad6-1591-40b9-997c-280860bd6501" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.038205] env[68285]: DEBUG nova.compute.manager [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Instance network_info: |[{"id": "b3b48e73-c170-4669-888a-5f674831a535", "address": "fa:16:3e:b9:79:a1", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b48e73-c1", "ovs_interfaceid": "b3b48e73-c170-4669-888a-5f674831a535", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 752.038638] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 
tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:79:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3b48e73-c170-4669-888a-5f674831a535', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 752.051590] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Creating folder: Project (74d2b141f0044c8985eae7c380a03466). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 752.052253] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9fb97e18-0e75-4ed2-8b2e-36bf4bf717a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.062344] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Created folder: Project (74d2b141f0044c8985eae7c380a03466) in parent group-v580775. [ 752.062344] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Creating folder: Instances. Parent ref: group-v580785. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 752.062344] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23ea4fe5-2f0a-420f-80a2-3436687aa77f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.073631] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Created folder: Instances in parent group-v580785. [ 752.073872] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 752.074071] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 752.074275] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0b26623-1a54-4f7a-b9d1-a9a7fc9c8385 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.098478] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 752.098478] env[68285]: value = "task-2890997" [ 752.098478] env[68285]: _type = "Task" [ 752.098478] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.106731] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2890997, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.142256] env[68285]: DEBUG nova.compute.manager [req-2442070f-3a1a-4f0d-9ed1-1e2d7de8d7cb req-3c02fcb3-701e-4fb6-8e48-23dad260b0dd service nova] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Received event network-changed-c1fb0925-6895-4803-ab32-896f8eb94202 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 752.142256] env[68285]: DEBUG nova.compute.manager [req-2442070f-3a1a-4f0d-9ed1-1e2d7de8d7cb req-3c02fcb3-701e-4fb6-8e48-23dad260b0dd service nova] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Refreshing instance network info cache due to event network-changed-c1fb0925-6895-4803-ab32-896f8eb94202. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 752.142256] env[68285]: DEBUG oslo_concurrency.lockutils [req-2442070f-3a1a-4f0d-9ed1-1e2d7de8d7cb req-3c02fcb3-701e-4fb6-8e48-23dad260b0dd service nova] Acquiring lock "refresh_cache-ef0636f4-3149-44e8-a4a3-62b9ede5dc28" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.142256] env[68285]: DEBUG oslo_concurrency.lockutils [req-2442070f-3a1a-4f0d-9ed1-1e2d7de8d7cb req-3c02fcb3-701e-4fb6-8e48-23dad260b0dd service nova] Acquired lock "refresh_cache-ef0636f4-3149-44e8-a4a3-62b9ede5dc28" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.142256] env[68285]: DEBUG nova.network.neutron [req-2442070f-3a1a-4f0d-9ed1-1e2d7de8d7cb req-3c02fcb3-701e-4fb6-8e48-23dad260b0dd service nova] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Refreshing network info cache for port c1fb0925-6895-4803-ab32-896f8eb94202 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 752.270232] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2890994, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026653} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.270478] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 752.270680] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Moving file from [datastore1] vmware_temp/16d90985-fe14-4773-8e30-542122dbb561/ce84ab4c-9913-42dc-b839-714ad2184867 to [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867. 
{{(pid=68285) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 752.270931] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-50636891-6da3-4e03-9ceb-9fc951f77b1e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.276974] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.341s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.277538] env[68285]: DEBUG nova.compute.manager [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 752.282440] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.572s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.283701] env[68285]: INFO nova.compute.claims [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 752.290193] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for the task: (returnval){ [ 752.290193] env[68285]: value = "task-2890998" [ 752.290193] env[68285]: _type = "Task" [ 752.290193] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.306421] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2890998, 'name': MoveDatastoreFile_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.394604] env[68285]: DEBUG oslo_concurrency.lockutils [req-f4135320-9354-40f1-978a-1a85c5b11d1d req-6632818e-767f-4d40-81c6-ad90e6102a43 service nova] Releasing lock "refresh_cache-e28d0927-17c2-4256-93d4-ef0cc2c9b92a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.402257] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "60144efd-061e-4144-9541-b2321c9b0ec1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.402257] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "60144efd-061e-4144-9541-b2321c9b0ec1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.613968] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2890997, 'name': CreateVM_Task, 'duration_secs': 0.373823} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.614155] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 752.615774] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.615774] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.615774] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 752.615774] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6ab8573-c5c4-499f-bd12-d78bd759522f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.625427] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting 
for the task: (returnval){ [ 752.625427] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d1d1cf-6db0-687a-d931-6372d995bfc4" [ 752.625427] env[68285]: _type = "Task" [ 752.625427] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.634559] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d1d1cf-6db0-687a-d931-6372d995bfc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.794973] env[68285]: DEBUG nova.compute.utils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 752.796387] env[68285]: DEBUG nova.compute.manager [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 752.796387] env[68285]: DEBUG nova.network.neutron [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 752.817741] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2890998, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.023946} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.817741] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] File moved {{(pid=68285) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 752.817741] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Cleaning up location [datastore1] vmware_temp/16d90985-fe14-4773-8e30-542122dbb561 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 752.817741] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Deleting the datastore file [datastore1] vmware_temp/16d90985-fe14-4773-8e30-542122dbb561 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 752.817741] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2d807c8-6a50-4b4e-a831-1c51fadeaed5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.824514] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for the task: (returnval){ [ 752.824514] env[68285]: value = "task-2890999" [ 752.824514] env[68285]: _type = "Task" [ 752.824514] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.840108] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2890999, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.115735] env[68285]: DEBUG nova.policy [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8eae13761e7e42eca6c6f1f427c83b3c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c48a5914ac041a283a7a17d06b57bb9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 753.140483] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d1d1cf-6db0-687a-d931-6372d995bfc4, 'name': SearchDatastore_Task, 'duration_secs': 0.01059} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.140838] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.141301] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 753.141301] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.294193] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Acquiring lock "5b58896c-cb07-48c8-ace0-385486a3e19d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.294465] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Lock "5b58896c-cb07-48c8-ace0-385486a3e19d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.308432] env[68285]: DEBUG nova.compute.manager [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 753.340249] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2890999, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026421} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.340647] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 753.341638] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ab3361b-32a1-4559-9fa2-71b217906627 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.354041] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for the task: (returnval){ [ 753.354041] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52982e13-4f34-d96d-cc0e-f9af35f9e312" [ 753.354041] env[68285]: _type = "Task" [ 753.354041] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.367280] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52982e13-4f34-d96d-cc0e-f9af35f9e312, 'name': SearchDatastore_Task, 'duration_secs': 0.009212} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.372020] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.372020] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] e28d0927-17c2-4256-93d4-ef0cc2c9b92a/e28d0927-17c2-4256-93d4-ef0cc2c9b92a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.372020] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.372020] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 753.372373] env[68285]: DEBUG 
oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0875b0cb-5709-4ab2-9820-361a01b9d61d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.372987] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1389a529-c9f3-4a4d-9b20-ede83393165e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.380182] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for the task: (returnval){ [ 753.380182] env[68285]: value = "task-2891000" [ 753.380182] env[68285]: _type = "Task" [ 753.380182] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.382231] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 753.382231] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 753.385333] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a7a239a-b8d9-4059-b68a-778d0dd8789e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.396245] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2891000, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.398038] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 753.398038] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52239e21-191d-3c09-89c1-22aee0d2fbf1" [ 753.398038] env[68285]: _type = "Task" [ 753.398038] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.406159] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52239e21-191d-3c09-89c1-22aee0d2fbf1, 'name': SearchDatastore_Task, 'duration_secs': 0.008287} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.411409] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a18efd28-6be8-4dc8-af92-0fdbe4da99da {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.417900] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 753.417900] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e58464-f445-32cc-9bd5-d48e83df81ef" [ 753.417900] env[68285]: _type = "Task" [ 753.417900] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.424570] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e58464-f445-32cc-9bd5-d48e83df81ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.484641] env[68285]: DEBUG nova.network.neutron [req-2442070f-3a1a-4f0d-9ed1-1e2d7de8d7cb req-3c02fcb3-701e-4fb6-8e48-23dad260b0dd service nova] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Updated VIF entry in instance network info cache for port c1fb0925-6895-4803-ab32-896f8eb94202. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 753.485158] env[68285]: DEBUG nova.network.neutron [req-2442070f-3a1a-4f0d-9ed1-1e2d7de8d7cb req-3c02fcb3-701e-4fb6-8e48-23dad260b0dd service nova] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Updating instance_info_cache with network_info: [{"id": "c1fb0925-6895-4803-ab32-896f8eb94202", "address": "fa:16:3e:32:6c:2b", "network": {"id": "c1e4f9b4-c2ac-4fdc-ba8f-0c8fa734758f", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-195692460-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f3de96c4b804dc5a95aa833fd4f8bef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55764410-260e-4339-a020-6b30995584bf", "external-id": "nsx-vlan-transportzone-551", "segmentation_id": 551, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1fb0925-68", "ovs_interfaceid": "c1fb0925-6895-4803-ab32-896f8eb94202", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.593046] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfedab40-6e5d-45ab-9097-f1946e4ffe09 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.605422] env[68285]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9bbfc6-e24d-42cc-956c-0e884385dfac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.640938] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce11bbad-2eb4-454b-89d5-4c472f4b549e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.649610] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d25abea-7b2a-4d35-a928-d1d736f83b5d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.665484] env[68285]: DEBUG nova.compute.provider_tree [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 753.892392] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2891000, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495228} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.895234] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] e28d0927-17c2-4256-93d4-ef0cc2c9b92a/e28d0927-17c2-4256-93d4-ef0cc2c9b92a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 753.895234] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 753.895234] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-427a9caf-086b-409b-9ab2-9ee5d3f2cb33 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.900721] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for the task: (returnval){ [ 753.900721] env[68285]: value = "task-2891001" [ 753.900721] env[68285]: _type = "Task" [ 753.900721] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.909936] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2891001, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.926753] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e58464-f445-32cc-9bd5-d48e83df81ef, 'name': SearchDatastore_Task, 'duration_secs': 0.008079} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.927181] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.927538] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] ef0636f4-3149-44e8-a4a3-62b9ede5dc28/ef0636f4-3149-44e8-a4a3-62b9ede5dc28.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.927915] env[68285]: DEBUG oslo_concurrency.lockutils [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.928188] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 753.928467] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18e78801-d5ca-47db-b374-67a9f75cd853 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.935534] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-622f15dd-b3e3-4553-888e-f18df41d39ed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.944292] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 753.944292] env[68285]: value = "task-2891002" [ 
753.944292] env[68285]: _type = "Task" [ 753.944292] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.950340] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 753.950605] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 753.952262] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c95e2a0d-2f5d-4e5e-9be0-329814bea8a6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.957091] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891002, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.960152] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Waiting for the task: (returnval){ [ 753.960152] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5225c73c-099b-c3a5-2a77-13487070f134" [ 753.960152] env[68285]: _type = "Task" [ 753.960152] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.967597] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5225c73c-099b-c3a5-2a77-13487070f134, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.990452] env[68285]: DEBUG oslo_concurrency.lockutils [req-2442070f-3a1a-4f0d-9ed1-1e2d7de8d7cb req-3c02fcb3-701e-4fb6-8e48-23dad260b0dd service nova] Releasing lock "refresh_cache-ef0636f4-3149-44e8-a4a3-62b9ede5dc28" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.064275] env[68285]: DEBUG nova.network.neutron [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Successfully updated port: 3e9ecf12-c47b-42e6-8dcf-0963075951af {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 754.130173] env[68285]: DEBUG nova.network.neutron [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Successfully created port: 9f5b021e-af4a-40de-ac20-e018f2923ae7 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 754.196190] env[68285]: ERROR nova.scheduler.client.report [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [req-85da3e04-4b67-4fc1-b6dc-a264a8493578] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-85da3e04-4b67-4fc1-b6dc-a264a8493578"}]} [ 754.196939] env[68285]: DEBUG nova.compute.manager [req-bfa204cf-5807-4454-af35-e63242542758 req-ede30169-9b3f-4d88-9d3b-47ddc2911353 service nova] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Received event network-vif-plugged-b3b48e73-c170-4669-888a-5f674831a535 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 754.197985] env[68285]: DEBUG oslo_concurrency.lockutils [req-bfa204cf-5807-4454-af35-e63242542758 req-ede30169-9b3f-4d88-9d3b-47ddc2911353 service nova] Acquiring lock "105f0ad6-1591-40b9-997c-280860bd6501-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.198245] env[68285]: DEBUG oslo_concurrency.lockutils [req-bfa204cf-5807-4454-af35-e63242542758 req-ede30169-9b3f-4d88-9d3b-47ddc2911353 service nova] Lock "105f0ad6-1591-40b9-997c-280860bd6501-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.198411] env[68285]: DEBUG oslo_concurrency.lockutils [req-bfa204cf-5807-4454-af35-e63242542758 req-ede30169-9b3f-4d88-9d3b-47ddc2911353 service nova] Lock "105f0ad6-1591-40b9-997c-280860bd6501-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.198606] env[68285]: DEBUG nova.compute.manager [req-bfa204cf-5807-4454-af35-e63242542758 req-ede30169-9b3f-4d88-9d3b-47ddc2911353 service nova] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] No waiting events found dispatching network-vif-plugged-b3b48e73-c170-4669-888a-5f674831a535 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 754.198764] env[68285]: WARNING nova.compute.manager [req-bfa204cf-5807-4454-af35-e63242542758 req-ede30169-9b3f-4d88-9d3b-47ddc2911353 service nova] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Received unexpected event network-vif-plugged-b3b48e73-c170-4669-888a-5f674831a535 for instance with vm_state building and task_state spawning. [ 754.198982] env[68285]: DEBUG nova.compute.manager [req-bfa204cf-5807-4454-af35-e63242542758 req-ede30169-9b3f-4d88-9d3b-47ddc2911353 service nova] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Received event network-changed-b3b48e73-c170-4669-888a-5f674831a535 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 754.200927] env[68285]: DEBUG nova.compute.manager [req-bfa204cf-5807-4454-af35-e63242542758 req-ede30169-9b3f-4d88-9d3b-47ddc2911353 service nova] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Refreshing instance network info cache due to event network-changed-b3b48e73-c170-4669-888a-5f674831a535. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 754.201279] env[68285]: DEBUG oslo_concurrency.lockutils [req-bfa204cf-5807-4454-af35-e63242542758 req-ede30169-9b3f-4d88-9d3b-47ddc2911353 service nova] Acquiring lock "refresh_cache-105f0ad6-1591-40b9-997c-280860bd6501" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.201383] env[68285]: DEBUG oslo_concurrency.lockutils [req-bfa204cf-5807-4454-af35-e63242542758 req-ede30169-9b3f-4d88-9d3b-47ddc2911353 service nova] Acquired lock "refresh_cache-105f0ad6-1591-40b9-997c-280860bd6501" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.201638] env[68285]: DEBUG nova.network.neutron [req-bfa204cf-5807-4454-af35-e63242542758 req-ede30169-9b3f-4d88-9d3b-47ddc2911353 service nova] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Refreshing network info cache for port b3b48e73-c170-4669-888a-5f674831a535 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 754.234992] env[68285]: DEBUG nova.scheduler.client.report [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 754.251886] env[68285]: DEBUG nova.scheduler.client.report [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 754.253326] env[68285]: DEBUG nova.compute.provider_tree [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 754.273981] env[68285]: DEBUG nova.scheduler.client.report [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 754.303646] env[68285]: DEBUG nova.scheduler.client.report [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 
tempest-MigrationsAdminTest-1593159837-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 754.318510] env[68285]: DEBUG nova.compute.manager [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 754.368774] env[68285]: DEBUG nova.virt.hardware [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 754.369035] env[68285]: DEBUG nova.virt.hardware [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 754.369236] env[68285]: DEBUG nova.virt.hardware [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 754.369422] env[68285]: DEBUG nova.virt.hardware [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 754.369603] env[68285]: DEBUG nova.virt.hardware [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 754.369944] env[68285]: DEBUG nova.virt.hardware [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 754.369944] env[68285]: DEBUG nova.virt.hardware [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 
tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 754.370204] env[68285]: DEBUG nova.virt.hardware [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 754.370415] env[68285]: DEBUG nova.virt.hardware [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 754.370603] env[68285]: DEBUG nova.virt.hardware [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 754.370761] env[68285]: DEBUG nova.virt.hardware [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 754.371722] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06edec7-0017-4f71-818e-2f2a06835362 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.386622] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3e090a-27a3-4d13-bcc2-65f9461fe7b1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.435692] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2891001, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094296} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.435989] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.441330] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04967471-9271-4e89-a0b2-216a36235846 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.473903] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] e28d0927-17c2-4256-93d4-ef0cc2c9b92a/e28d0927-17c2-4256-93d4-ef0cc2c9b92a.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.482456] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27b0864c-2de2-4662-b5e1-b0a5c091f05e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.504782] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891002, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509318} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.513374] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] ef0636f4-3149-44e8-a4a3-62b9ede5dc28/ef0636f4-3149-44e8-a4a3-62b9ede5dc28.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 754.513834] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 754.513975] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5225c73c-099b-c3a5-2a77-13487070f134, 'name': SearchDatastore_Task, 'duration_secs': 0.011458} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.516698] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71aac3e3-9f28-4d35-ab58-50ec7f47e8ed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.524191] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-395180f6-f353-4410-bdf9-8de0d7752be5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.526694] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for the task: (returnval){ [ 754.526694] env[68285]: value = "task-2891003" [ 754.526694] env[68285]: _type = "Task" [ 754.526694] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.533646] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 754.533646] env[68285]: value = "task-2891004" [ 754.533646] env[68285]: _type = "Task" [ 754.533646] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.533646] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Waiting for the task: (returnval){ [ 754.533646] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527e247f-2abf-9b7f-1ef4-dfe70ad0b2e8" [ 754.533646] env[68285]: _type = "Task" [ 754.533646] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.548745] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2891003, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.552463] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891004, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.554566] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527e247f-2abf-9b7f-1ef4-dfe70ad0b2e8, 'name': SearchDatastore_Task, 'duration_secs': 0.011629} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.555037] env[68285]: DEBUG oslo_concurrency.lockutils [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.555106] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa/9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 754.557020] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.557020] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 754.557020] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c5576096-e6ca-48ed-b3bd-a6271cf5110e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.558188] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38df55b7-991e-4f45-887a-7dfffc5b3c23 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.564916] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Waiting for the task: (returnval){ [ 754.564916] env[68285]: value = "task-2891005" [ 754.564916] env[68285]: _type = "Task" [ 754.564916] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.569692] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Acquiring lock "refresh_cache-682c3b6e-a605-486a-86c8-af173d80cbcf" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.573784] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Acquired lock "refresh_cache-682c3b6e-a605-486a-86c8-af173d80cbcf" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.573784] env[68285]: DEBUG nova.network.neutron [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 754.573784] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 754.573784] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 754.575277] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a113d85c-9063-4a03-8016-6bf9d531acd7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.583303] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891005, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.587802] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 754.587802] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b5f951-f1c2-dc95-c3af-451ffb3c5f75" [ 754.587802] env[68285]: _type = "Task" [ 754.587802] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.596873] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b5f951-f1c2-dc95-c3af-451ffb3c5f75, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.726783] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3823be-fbde-46a2-b59f-822af24d267d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.740610] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b705a1-bdab-4546-8e5a-f6bf69d47f03 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.782221] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108054c1-e73f-4474-86e3-27b21bf494ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.791800] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1835c85-8a9b-4dfc-b772-4d47455223b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.808645] env[68285]: DEBUG nova.compute.provider_tree [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 755.047075] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2891003, 'name': ReconfigVM_Task, 'duration_secs': 0.333731} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.050249] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Reconfigured VM instance instance-00000001 to attach disk [datastore1] e28d0927-17c2-4256-93d4-ef0cc2c9b92a/e28d0927-17c2-4256-93d4-ef0cc2c9b92a.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.051134] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891004, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063788} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.051438] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94a402aa-fdb9-4b73-8466-0c08482d4b95 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.053203] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 755.054019] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2440eb-b07a-4054-9948-ee9f30942abc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.080798] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] ef0636f4-3149-44e8-a4a3-62b9ede5dc28/ef0636f4-3149-44e8-a4a3-62b9ede5dc28.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 755.087886] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6a88535-9f11-4ac9-adaa-27b940a16354 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.102928] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for the task: (returnval){ [ 755.102928] env[68285]: value = "task-2891006" [ 755.102928] env[68285]: _type = "Task" [ 755.102928] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.114341] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891005, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526709} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.114668] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 755.114668] env[68285]: value = "task-2891007" [ 755.114668] env[68285]: _type = "Task" [ 755.114668] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.116124] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa/9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 755.116430] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 755.116636] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb02a76c-45e0-4f56-819d-89a14f31c04b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.130349] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b5f951-f1c2-dc95-c3af-451ffb3c5f75, 'name': SearchDatastore_Task, 'duration_secs': 0.013297} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.134643] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e1d5c55-994d-4a3c-a67c-2f68003bc154 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.144272] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2891006, 'name': Rename_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.144621] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Waiting for the task: (returnval){ [ 755.144621] env[68285]: value = "task-2891008" [ 755.144621] env[68285]: _type = "Task" [ 755.144621] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.144878] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891007, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.150657] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 755.150657] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52115239-d092-e8ea-e261-a7cb79f4cff4" [ 755.150657] env[68285]: _type = "Task" [ 755.150657] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.159188] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891008, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.165722] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52115239-d092-e8ea-e261-a7cb79f4cff4, 'name': SearchDatastore_Task, 'duration_secs': 0.011156} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.169121] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.169121] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 105f0ad6-1591-40b9-997c-280860bd6501/105f0ad6-1591-40b9-997c-280860bd6501.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 755.169121] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a65f25f-8a87-4e2f-aab6-b10321dc91ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.174324] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 755.174324] env[68285]: value = "task-2891009" [ 755.174324] env[68285]: _type = "Task" [ 755.174324] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.185364] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891009, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.193884] env[68285]: DEBUG nova.network.neutron [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 755.358330] env[68285]: DEBUG nova.scheduler.client.report [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 17 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 755.358640] env[68285]: DEBUG nova.compute.provider_tree [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 17 to 18 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 755.358850] env[68285]: DEBUG nova.compute.provider_tree [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 755.616597] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2891006, 'name': Rename_Task, 'duration_secs': 0.158705} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.616597] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 755.618192] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85dacdb0-1be8-470a-8411-5c55d0660a16 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.628992] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891007, 'name': ReconfigVM_Task, 'duration_secs': 0.374936} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.630713] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Reconfigured VM instance instance-00000002 to attach disk [datastore1] ef0636f4-3149-44e8-a4a3-62b9ede5dc28/ef0636f4-3149-44e8-a4a3-62b9ede5dc28.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.630916] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for the task: (returnval){ [ 755.630916] env[68285]: value = "task-2891010" [ 755.630916] env[68285]: _type = "Task" [ 755.630916] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.632989] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62f8a729-f405-4ec3-946b-eff84c2659ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.637721] env[68285]: DEBUG nova.network.neutron [req-bfa204cf-5807-4454-af35-e63242542758 req-ede30169-9b3f-4d88-9d3b-47ddc2911353 service nova] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Updated VIF entry in instance network info cache for port b3b48e73-c170-4669-888a-5f674831a535. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 755.638241] env[68285]: DEBUG nova.network.neutron [req-bfa204cf-5807-4454-af35-e63242542758 req-ede30169-9b3f-4d88-9d3b-47ddc2911353 service nova] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Updating instance_info_cache with network_info: [{"id": "b3b48e73-c170-4669-888a-5f674831a535", "address": "fa:16:3e:b9:79:a1", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b48e73-c1", "ovs_interfaceid": "b3b48e73-c170-4669-888a-5f674831a535", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.641180] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 755.641180] env[68285]: value = "task-2891011" [ 755.641180] env[68285]: _type = "Task" [ 755.641180] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.657697] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891011, 'name': Rename_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.659276] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891008, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11641} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.659527] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 755.660319] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f181864f-e2c8-4d80-8e00-d181d6d87c4c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.691072] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa/9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 755.694321] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b91aac96-d776-4777-b63e-efeb51cc8c26 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.717554] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891009, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468878} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.719122] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 105f0ad6-1591-40b9-997c-280860bd6501/105f0ad6-1591-40b9-997c-280860bd6501.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 755.719328] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 755.719478] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Waiting for the task: (returnval){ [ 755.719478] env[68285]: value = "task-2891012" [ 755.719478] env[68285]: _type = "Task" [ 755.719478] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.719657] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54f054a5-947f-4440-9f7b-fc1810313feb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.730344] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891012, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.731680] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 755.731680] env[68285]: value = "task-2891013" [ 755.731680] env[68285]: _type = "Task" [ 755.731680] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.739872] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891013, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.865210] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.583s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.865932] env[68285]: DEBUG nova.compute.manager [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 755.869829] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.106s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.871883] env[68285]: INFO nova.compute.claims [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 755.874636] env[68285]: DEBUG nova.network.neutron [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Updating instance_info_cache with network_info: [{"id": "3e9ecf12-c47b-42e6-8dcf-0963075951af", "address": "fa:16:3e:b6:b5:9d", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e9ecf12-c4", "ovs_interfaceid": "3e9ecf12-c47b-42e6-8dcf-0963075951af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.145988] env[68285]: DEBUG oslo_concurrency.lockutils [req-bfa204cf-5807-4454-af35-e63242542758 req-ede30169-9b3f-4d88-9d3b-47ddc2911353 service nova] Releasing lock "refresh_cache-105f0ad6-1591-40b9-997c-280860bd6501" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.149779] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2891010, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.164103] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891011, 'name': Rename_Task, 'duration_secs': 0.238555} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.165607] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 756.165607] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ca1ade1-0547-46f3-a9e2-07c8feaa9276 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.173394] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 756.173394] env[68285]: value = "task-2891014" [ 756.173394] env[68285]: _type = "Task" [ 756.173394] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.188518] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891014, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.236894] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891012, 'name': ReconfigVM_Task, 'duration_secs': 0.424932} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.240556] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa/9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 756.241648] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57ffe1cf-1881-44e4-a00b-08fa05dd7be9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.247823] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891013, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058962} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.249830] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 756.250283] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Waiting for the task: (returnval){ [ 756.250283] env[68285]: value = "task-2891015" [ 756.250283] env[68285]: _type = "Task" [ 756.250283] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.251725] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c424f4-958d-4478-a3df-600a12437a88 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.263495] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891015, 'name': Rename_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.289298] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 105f0ad6-1591-40b9-997c-280860bd6501/105f0ad6-1591-40b9-997c-280860bd6501.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 756.290375] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa4b7ccd-56b0-4806-8696-0c953120acf7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.313969] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 756.313969] env[68285]: value = "task-2891016" [ 756.313969] env[68285]: _type = "Task" [ 756.313969] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.326350] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891016, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.378323] env[68285]: DEBUG nova.compute.utils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 756.381596] env[68285]: DEBUG nova.compute.manager [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 756.381811] env[68285]: DEBUG nova.network.neutron [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 756.389314] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Releasing lock "refresh_cache-682c3b6e-a605-486a-86c8-af173d80cbcf" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.389603] env[68285]: DEBUG nova.compute.manager [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Instance network_info: |[{"id": "3e9ecf12-c47b-42e6-8dcf-0963075951af", "address": "fa:16:3e:b6:b5:9d", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e9ecf12-c4", "ovs_interfaceid": "3e9ecf12-c47b-42e6-8dcf-0963075951af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 756.389975] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:b5:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e9ecf12-c47b-42e6-8dcf-0963075951af', 'vif_model': 
'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 756.403218] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Creating folder: Project (d8822845305049a2ba69dba0963753f0). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 756.403883] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-002af65b-25ad-4c31-bdcf-69e93e86b752 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.420834] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Created folder: Project (d8822845305049a2ba69dba0963753f0) in parent group-v580775. [ 756.420834] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Creating folder: Instances. Parent ref: group-v580788. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 756.420834] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d4aa09d-5272-446d-9c3b-8df715a18084 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.427648] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Created folder: Instances in parent group-v580788. [ 756.427869] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 756.428058] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 756.428323] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f77e51d3-12da-4cbd-829f-65f398a16054 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.447872] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 756.447872] env[68285]: value = "task-2891019" [ 756.447872] env[68285]: _type = "Task" [ 756.447872] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.456821] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891019, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.608118] env[68285]: DEBUG nova.policy [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd27450be410458ba1f009b191126755', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f891a62d3df3400fa53ac94230bcb8a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 756.645846] env[68285]: DEBUG oslo_vmware.api [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2891010, 'name': PowerOnVM_Task, 'duration_secs': 0.57178} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.646443] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 756.646902] env[68285]: INFO nova.compute.manager [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Took 13.31 seconds to spawn the instance on the hypervisor. [ 756.647179] env[68285]: DEBUG nova.compute.manager [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 756.651418] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ddaf8db-f7c4-479d-9d96-60bbba60ef5e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.689632] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891014, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.725036] env[68285]: DEBUG nova.network.neutron [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Successfully updated port: 9f5b021e-af4a-40de-ac20-e018f2923ae7 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 756.767782] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891015, 'name': Rename_Task, 'duration_secs': 0.324893} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.767782] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 756.767782] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a631fced-ed5f-4fcb-9314-27b3896cd913 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.773584] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Waiting for the task: (returnval){ [ 756.773584] env[68285]: value = "task-2891020" [ 756.773584] env[68285]: _type = "Task" [ 756.773584] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.784992] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891020, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.825740] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891016, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.893837] env[68285]: DEBUG nova.compute.manager [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 756.965305] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891019, 'name': CreateVM_Task, 'duration_secs': 0.50939} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.965583] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 756.966978] env[68285]: DEBUG oslo_vmware.service [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780e8917-c868-447d-8b45-e5d5ca16ca87 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.974577] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.974684] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.975073] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 756.976774] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-862ea0ee-87f9-4dc8-81c9-755094c58947 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.981763] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for the task: (returnval){ [ 756.981763] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521d8c69-1fc4-8f31-3ebe-d6c890692846" [ 756.981763] env[68285]: _type = "Task" [ 756.981763] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.994449] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521d8c69-1fc4-8f31-3ebe-d6c890692846, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.144600] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38a211d-cdb6-4ec2-a5c6-8c133619e489 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.152356] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ae85b7-ad74-41a5-8bb7-5e83c7b30b15 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.200965] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0db49bd-b496-4239-b20c-f61cd19ca4c4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.205819] env[68285]: INFO nova.compute.manager [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Took 18.12 seconds to build instance. [ 757.213041] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891014, 'name': PowerOnVM_Task} progress is 64%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.216482] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f01f018-92b3-4b13-9a89-adde03664821 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.232921] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Acquiring lock "refresh_cache-f26a5b02-c71f-4f04-a8b2-4e284a6e37a6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.233080] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Acquired lock "refresh_cache-f26a5b02-c71f-4f04-a8b2-4e284a6e37a6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.233232] env[68285]: DEBUG nova.network.neutron [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 757.235348] env[68285]: DEBUG nova.compute.provider_tree [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.286045] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 
tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891020, 'name': PowerOnVM_Task} progress is 64%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.329730] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891016, 'name': ReconfigVM_Task, 'duration_secs': 0.619011} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.329730] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 105f0ad6-1591-40b9-997c-280860bd6501/105f0ad6-1591-40b9-997c-280860bd6501.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 757.329730] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cbff07a5-eb93-4c78-90ac-a9c6bd17b52c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.338442] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 757.338442] env[68285]: value = "task-2891021" [ 757.338442] env[68285]: _type = "Task" [ 757.338442] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.348427] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891021, 'name': Rename_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.496708] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.497271] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.498178] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.500578] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.500578] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 757.500578] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4e38d73-24e2-453a-94eb-5f41fb1b7bbd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.514010] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 757.514467] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 757.515203] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba8dbe1-5ae8-4fb0-8223-c2ccdb551951 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.524737] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f31bdb7-a4dd-4c43-802b-910c2cf77627 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.531099] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for the task: (returnval){ [ 757.531099] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52fd1be4-9d0a-87ef-ca26-2d53dae34e55" [ 757.531099] env[68285]: _type = "Task" [ 757.531099] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.541174] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52fd1be4-9d0a-87ef-ca26-2d53dae34e55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.688368] env[68285]: DEBUG nova.network.neutron [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Successfully created port: 84cbe58d-a7c4-4c42-9f87-9a6b62805b10 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 757.707033] env[68285]: DEBUG oslo_vmware.api [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891014, 'name': PowerOnVM_Task, 'duration_secs': 1.270184} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.707428] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 757.707428] env[68285]: INFO nova.compute.manager [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Took 12.08 seconds to spawn the instance on the hypervisor. 
[ 757.707588] env[68285]: DEBUG nova.compute.manager [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 757.708325] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d0880581-88e9-4206-873f-e1c4646e2214 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Lock "e28d0927-17c2-4256-93d4-ef0cc2c9b92a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.640s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.709571] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1646fbb-32e2-453e-ae47-28a0b300f86b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.740733] env[68285]: DEBUG nova.scheduler.client.report [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 757.787332] env[68285]: DEBUG oslo_vmware.api [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891020, 'name': PowerOnVM_Task, 'duration_secs': 0.8615} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.787332] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 757.787332] env[68285]: INFO nova.compute.manager [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Took 7.67 seconds to spawn the instance on the hypervisor. 
[ 757.787799] env[68285]: DEBUG nova.compute.manager [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 757.788273] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12632182-be14-434d-b092-fe67ad84aaaf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.844954] env[68285]: DEBUG nova.network.neutron [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 757.855619] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891021, 'name': Rename_Task, 'duration_secs': 0.205011} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.855619] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 757.855619] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d3b9228-1d40-451b-af0c-da009fa277dc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.860064] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 757.860064] env[68285]: value = "task-2891022" [ 757.860064] env[68285]: _type = "Task" [ 757.860064] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.868445] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891022, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.909527] env[68285]: DEBUG nova.compute.manager [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 757.942677] env[68285]: DEBUG nova.virt.hardware [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 757.942910] env[68285]: DEBUG nova.virt.hardware [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 757.943091] env[68285]: DEBUG nova.virt.hardware [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 757.943277] env[68285]: DEBUG nova.virt.hardware [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 757.943420] env[68285]: DEBUG nova.virt.hardware [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 757.943725] env[68285]: DEBUG nova.virt.hardware [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 757.943961] env[68285]: DEBUG nova.virt.hardware [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 757.944149] env[68285]: DEBUG nova.virt.hardware [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 757.944313] env[68285]: DEBUG nova.virt.hardware [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 
tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 757.944472] env[68285]: DEBUG nova.virt.hardware [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 757.944715] env[68285]: DEBUG nova.virt.hardware [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 757.948359] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a051f041-bd84-4c89-a535-189ac3e94921 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.954452] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6221662c-3498-452b-b025-89eb297ab0ae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.047744] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Preparing fetch location {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 758.047972] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Creating directory with path [datastore2] vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a/ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 758.048234] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46883961-d6f3-4713-95a5-1e9c289a1155 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.088271] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Created directory with path [datastore2] vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a/ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 758.088271] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Fetch image to [datastore2] vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 758.088271] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] 
[instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Downloading image file data ce84ab4c-9913-42dc-b839-714ad2184867 to [datastore2] vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk on the data store datastore2 {{(pid=68285) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 758.091350] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20268ae0-88d1-45fb-b1aa-89e5c00fea57 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.099432] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626ee6cb-45a0-4147-aa36-e7f80e633078 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.110319] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29392a4-8864-473b-971e-c4529a9ecae5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.149516] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d34abd7-ab27-4379-818b-ea3416c74a0d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.156595] env[68285]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9c5c1ed7-8571-4a18-9699-ebe1c8392237 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.219569] env[68285]: DEBUG nova.compute.manager [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 758.245890] env[68285]: INFO nova.compute.manager [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Took 18.80 seconds to build instance. [ 758.247173] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.247966] env[68285]: DEBUG nova.compute.manager [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 758.250897] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 10.863s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.251069] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.251217] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68285) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 758.251479] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.490s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.254021] env[68285]: INFO nova.compute.claims [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 758.257945] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d330d66-ef1f-4885-8cbf-5eec00866556 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.264466] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Downloading image file data ce84ab4c-9913-42dc-b839-714ad2184867 to the data store datastore2 {{(pid=68285) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 758.278148] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d15f656-9928-4832-ba4e-70f91b925852 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.301347] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe9cb19-1983-4cc0-89cf-8c0deba5499e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.319701] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed9e0803-37a1-419d-8942-307aa568e9ea {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.323828] env[68285]: INFO nova.compute.manager [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 
tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Took 16.46 seconds to build instance. [ 758.363407] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181120MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=68285) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 758.363491] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.367622] env[68285]: DEBUG oslo_vmware.rw_handles [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68285) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 758.434866] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891022, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.567197] env[68285]: DEBUG nova.compute.manager [req-a5645c87-bd73-4d85-9b45-7221223052e6 req-aebeefaf-4157-4999-abd2-d5f87343108d service nova] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Received event network-vif-plugged-9f5b021e-af4a-40de-ac20-e018f2923ae7 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 758.567457] env[68285]: DEBUG oslo_concurrency.lockutils [req-a5645c87-bd73-4d85-9b45-7221223052e6 req-aebeefaf-4157-4999-abd2-d5f87343108d service nova] Acquiring lock "f26a5b02-c71f-4f04-a8b2-4e284a6e37a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.568848] env[68285]: DEBUG oslo_concurrency.lockutils [req-a5645c87-bd73-4d85-9b45-7221223052e6 req-aebeefaf-4157-4999-abd2-d5f87343108d service nova] Lock "f26a5b02-c71f-4f04-a8b2-4e284a6e37a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.568848] env[68285]: DEBUG oslo_concurrency.lockutils [req-a5645c87-bd73-4d85-9b45-7221223052e6 req-aebeefaf-4157-4999-abd2-d5f87343108d service nova] Lock "f26a5b02-c71f-4f04-a8b2-4e284a6e37a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.568848] env[68285]: DEBUG nova.compute.manager [req-a5645c87-bd73-4d85-9b45-7221223052e6 req-aebeefaf-4157-4999-abd2-d5f87343108d service nova] [instance: 
f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] No waiting events found dispatching network-vif-plugged-9f5b021e-af4a-40de-ac20-e018f2923ae7 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 758.568848] env[68285]: WARNING nova.compute.manager [req-a5645c87-bd73-4d85-9b45-7221223052e6 req-aebeefaf-4157-4999-abd2-d5f87343108d service nova] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Received unexpected event network-vif-plugged-9f5b021e-af4a-40de-ac20-e018f2923ae7 for instance with vm_state building and task_state spawning. [ 758.592565] env[68285]: DEBUG nova.compute.manager [req-1c0dec3c-2258-4d88-94f5-1ef38e8b239b req-17328758-d850-44dd-be19-9ebe8e9ed138 service nova] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Received event network-vif-plugged-3e9ecf12-c47b-42e6-8dcf-0963075951af {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 758.592699] env[68285]: DEBUG oslo_concurrency.lockutils [req-1c0dec3c-2258-4d88-94f5-1ef38e8b239b req-17328758-d850-44dd-be19-9ebe8e9ed138 service nova] Acquiring lock "682c3b6e-a605-486a-86c8-af173d80cbcf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.592899] env[68285]: DEBUG oslo_concurrency.lockutils [req-1c0dec3c-2258-4d88-94f5-1ef38e8b239b req-17328758-d850-44dd-be19-9ebe8e9ed138 service nova] Lock "682c3b6e-a605-486a-86c8-af173d80cbcf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.593091] env[68285]: DEBUG oslo_concurrency.lockutils [req-1c0dec3c-2258-4d88-94f5-1ef38e8b239b req-17328758-d850-44dd-be19-9ebe8e9ed138 service nova] Lock "682c3b6e-a605-486a-86c8-af173d80cbcf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.593259] env[68285]: DEBUG nova.compute.manager [req-1c0dec3c-2258-4d88-94f5-1ef38e8b239b req-17328758-d850-44dd-be19-9ebe8e9ed138 service nova] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] No waiting events found dispatching network-vif-plugged-3e9ecf12-c47b-42e6-8dcf-0963075951af {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 758.593422] env[68285]: WARNING nova.compute.manager [req-1c0dec3c-2258-4d88-94f5-1ef38e8b239b req-17328758-d850-44dd-be19-9ebe8e9ed138 service nova] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Received unexpected event network-vif-plugged-3e9ecf12-c47b-42e6-8dcf-0963075951af for instance with vm_state building and task_state spawning. [ 758.593585] env[68285]: DEBUG nova.compute.manager [req-1c0dec3c-2258-4d88-94f5-1ef38e8b239b req-17328758-d850-44dd-be19-9ebe8e9ed138 service nova] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Received event network-changed-3e9ecf12-c47b-42e6-8dcf-0963075951af {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 758.593722] env[68285]: DEBUG nova.compute.manager [req-1c0dec3c-2258-4d88-94f5-1ef38e8b239b req-17328758-d850-44dd-be19-9ebe8e9ed138 service nova] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Refreshing instance network info cache due to event network-changed-3e9ecf12-c47b-42e6-8dcf-0963075951af. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 758.593929] env[68285]: DEBUG oslo_concurrency.lockutils [req-1c0dec3c-2258-4d88-94f5-1ef38e8b239b req-17328758-d850-44dd-be19-9ebe8e9ed138 service nova] Acquiring lock "refresh_cache-682c3b6e-a605-486a-86c8-af173d80cbcf" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.596394] env[68285]: DEBUG oslo_concurrency.lockutils [req-1c0dec3c-2258-4d88-94f5-1ef38e8b239b req-17328758-d850-44dd-be19-9ebe8e9ed138 service nova] Acquired lock "refresh_cache-682c3b6e-a605-486a-86c8-af173d80cbcf" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.596394] env[68285]: DEBUG nova.network.neutron [req-1c0dec3c-2258-4d88-94f5-1ef38e8b239b req-17328758-d850-44dd-be19-9ebe8e9ed138 service nova] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Refreshing network info cache for port 3e9ecf12-c47b-42e6-8dcf-0963075951af {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 758.609200] env[68285]: DEBUG nova.network.neutron [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Updating instance_info_cache with network_info: [{"id": "9f5b021e-af4a-40de-ac20-e018f2923ae7", "address": "fa:16:3e:60:f5:49", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5b021e-af", "ovs_interfaceid": "9f5b021e-af4a-40de-ac20-e018f2923ae7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.752515] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a1dca5c6-bdd1-4245-91f9-36d439311ab1 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lock "ef0636f4-3149-44e8-a4a3-62b9ede5dc28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.321s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.758744] env[68285]: DEBUG nova.compute.utils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 758.763210] env[68285]: DEBUG nova.compute.manager [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d 
tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 758.763420] env[68285]: DEBUG nova.network.neutron [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 758.768073] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.827651] env[68285]: DEBUG oslo_concurrency.lockutils [None req-372384dd-e5b8-40ee-a618-349e4298464d tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Lock "9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.976s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.881795] env[68285]: DEBUG oslo_vmware.api [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891022, 'name': PowerOnVM_Task, 'duration_secs': 0.925367} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.882606] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 758.882814] env[68285]: INFO nova.compute.manager [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Took 11.02 seconds to spawn the instance on the hypervisor. 
[ 758.882990] env[68285]: DEBUG nova.compute.manager [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 758.884670] env[68285]: DEBUG nova.policy [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c07415c552542bda58552ad79163d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb8e48ceae0748b0b8c762ab7303a4b7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 758.887648] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7bc286-c016-4471-9dc3-47c060300128 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.113298] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Releasing lock "refresh_cache-f26a5b02-c71f-4f04-a8b2-4e284a6e37a6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.113298] env[68285]: DEBUG nova.compute.manager [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Instance network_info: |[{"id": "9f5b021e-af4a-40de-ac20-e018f2923ae7", "address": "fa:16:3e:60:f5:49", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5b021e-af", "ovs_interfaceid": "9f5b021e-af4a-40de-ac20-e018f2923ae7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 759.113542] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:60:f5:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f5b021e-af4a-40de-ac20-e018f2923ae7', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 759.122968] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Creating folder: Project (7c48a5914ac041a283a7a17d06b57bb9). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 759.129952] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-527333d1-9ca8-4f67-8768-5b024a6d2660 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.140142] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Created folder: Project (7c48a5914ac041a283a7a17d06b57bb9) in parent group-v580775. [ 759.140142] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Creating folder: Instances. Parent ref: group-v580791. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 759.140371] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3080133-2ce5-4fcd-9e2f-b518e218eb5e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.150050] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Created folder: Instances in parent group-v580791. [ 759.150319] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 759.150732] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 759.151448] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1f99af6-52d8-406e-93d8-82e083ad3b77 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.177163] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 759.177163] env[68285]: value = "task-2891025" [ 759.177163] env[68285]: _type = "Task" [ 759.177163] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.184803] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891025, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.245261] env[68285]: DEBUG oslo_vmware.rw_handles [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Completed reading data from the image iterator. {{(pid=68285) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 759.245593] env[68285]: DEBUG oslo_vmware.rw_handles [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 759.255352] env[68285]: DEBUG nova.compute.manager [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 759.267623] env[68285]: DEBUG nova.compute.manager [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 759.334293] env[68285]: DEBUG nova.compute.manager [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 759.364076] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Downloaded image file data ce84ab4c-9913-42dc-b839-714ad2184867 to vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk on the data store datastore2 {{(pid=68285) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 759.365882] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Caching image {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 759.366193] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Copying Virtual Disk [datastore2] vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk to [datastore2] vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 759.367332] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3beb5ca1-90ec-4a7a-8fad-1f01ba3f4454 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.392701] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for the task: (returnval){ [ 759.392701] env[68285]: value = "task-2891026" [ 759.392701] env[68285]: _type = "Task" [ 759.392701] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.408283] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891026, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.414582] env[68285]: INFO nova.compute.manager [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Took 17.71 seconds to build instance. [ 759.500626] env[68285]: DEBUG nova.network.neutron [req-1c0dec3c-2258-4d88-94f5-1ef38e8b239b req-17328758-d850-44dd-be19-9ebe8e9ed138 service nova] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Updated VIF entry in instance network info cache for port 3e9ecf12-c47b-42e6-8dcf-0963075951af. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 759.501158] env[68285]: DEBUG nova.network.neutron [req-1c0dec3c-2258-4d88-94f5-1ef38e8b239b req-17328758-d850-44dd-be19-9ebe8e9ed138 service nova] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Updating instance_info_cache with network_info: [{"id": "3e9ecf12-c47b-42e6-8dcf-0963075951af", "address": "fa:16:3e:b6:b5:9d", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e9ecf12-c4", "ovs_interfaceid": "3e9ecf12-c47b-42e6-8dcf-0963075951af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.598480] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7e49cf-1e13-4812-acf5-952fc17d06bd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.606187] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e475fd38-1b86-4d62-83c1-978c8e55c05f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.644387] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d951c5-8886-47c3-a589-be5fdc349e66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.652564] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412f71a1-5a8b-4238-8bfb-c8de60eb56c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.667290] env[68285]: DEBUG nova.compute.provider_tree [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.685974] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891025, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.719960] env[68285]: DEBUG nova.network.neutron [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Successfully created port: ae1e3da0-addf-4feb-83f8-8a52e6a74a39 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 759.796991] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.874696] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.907669] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891026, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.918982] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f387a370-4ac3-4888-88d3-f55b2a0afa7f tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lock "105f0ad6-1591-40b9-997c-280860bd6501" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.222s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.007916] env[68285]: DEBUG oslo_concurrency.lockutils [req-1c0dec3c-2258-4d88-94f5-1ef38e8b239b req-17328758-d850-44dd-be19-9ebe8e9ed138 service nova] Releasing lock "refresh_cache-682c3b6e-a605-486a-86c8-af173d80cbcf" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.113499] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "12fad42a-1011-4563-b11f-7b141b2a1670" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.113499] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "12fad42a-1011-4563-b11f-7b141b2a1670" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.170373] env[68285]: DEBUG 
nova.scheduler.client.report [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 760.190084] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891025, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.280835] env[68285]: DEBUG nova.compute.manager [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 760.317166] env[68285]: DEBUG nova.virt.hardware [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 760.317166] env[68285]: DEBUG nova.virt.hardware [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.317166] env[68285]: DEBUG nova.virt.hardware [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 760.317581] env[68285]: DEBUG nova.virt.hardware [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.317581] env[68285]: DEBUG nova.virt.hardware [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] 
Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 760.317581] env[68285]: DEBUG nova.virt.hardware [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 760.317581] env[68285]: DEBUG nova.virt.hardware [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 760.317581] env[68285]: DEBUG nova.virt.hardware [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 760.317738] env[68285]: DEBUG nova.virt.hardware [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 760.317932] env[68285]: DEBUG nova.virt.hardware [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 760.318026] env[68285]: DEBUG nova.virt.hardware [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 760.318935] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-773def3d-59d2-4b3c-9255-40294f521749 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.333742] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7eaab6-7206-4e59-9c8a-fd63744c2649 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.406749] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891026, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.935139} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.407116] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Copied Virtual Disk [datastore2] vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk to [datastore2] vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 760.407322] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Deleting the datastore file [datastore2] vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a/ce84ab4c-9913-42dc-b839-714ad2184867/tmp-sparse.vmdk {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 760.407628] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c87b18a-3357-4267-a946-7dc19606e95b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.417641] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for the task: (returnval){ [ 760.417641] env[68285]: value = "task-2891027" [ 760.417641] env[68285]: _type = "Task" [ 760.417641] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.429939] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891027, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.480021] env[68285]: DEBUG nova.network.neutron [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Successfully updated port: 84cbe58d-a7c4-4c42-9f87-9a6b62805b10 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 760.598306] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquiring lock "87582063-50f9-4518-ad2d-915c9cd49b19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.598306] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Lock "87582063-50f9-4518-ad2d-915c9cd49b19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.616296] env[68285]: DEBUG nova.compute.manager [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 760.675875] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.424s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.676586] env[68285]: DEBUG nova.compute.manager [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 760.681118] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.304s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.685303] env[68285]: INFO nova.compute.claims [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 760.705618] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891025, 'name': CreateVM_Task, 'duration_secs': 1.237025} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.705618] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 760.705618] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.706304] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.706948] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 760.710035] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f9af4d7-1cd2-4b2c-b19b-02028362e7ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.717466] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Waiting for the task: (returnval){ [ 760.717466] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523852ba-f673-f5da-dc07-6a8be017de56" [ 760.717466] env[68285]: _type = "Task" [ 760.717466] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.732282] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523852ba-f673-f5da-dc07-6a8be017de56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.852462] env[68285]: DEBUG nova.compute.manager [None req-4ae4ad2a-f431-4049-bef5-db29cb654998 tempest-ServerDiagnosticsV248Test-390296692 tempest-ServerDiagnosticsV248Test-390296692-project-admin] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 760.858057] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d46ca9-5d61-4df8-b64a-02efd0694e1e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.874336] env[68285]: INFO nova.compute.manager [None req-4ae4ad2a-f431-4049-bef5-db29cb654998 tempest-ServerDiagnosticsV248Test-390296692 tempest-ServerDiagnosticsV248Test-390296692-project-admin] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Retrieving diagnostics [ 760.875224] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d5fbfd4-0b15-44f4-b212-d8d10dbd8c16 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.930901] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891027, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036733} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.930901] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 760.930901] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Moving file from [datastore2] vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a/ce84ab4c-9913-42dc-b839-714ad2184867 to [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867. {{(pid=68285) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 760.931150] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-f5d4350a-df09-4b3d-af4f-4bc1a4cb169b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.941198] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for the task: (returnval){ [ 760.941198] env[68285]: value = "task-2891028" [ 760.941198] env[68285]: _type = "Task" [ 760.941198] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.952229] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891028, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.985134] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.985313] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.985480] env[68285]: DEBUG nova.network.neutron [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 761.153020] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.197725] env[68285]: DEBUG nova.compute.utils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 761.199744] env[68285]: DEBUG nova.compute.manager [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 761.199948] env[68285]: DEBUG nova.network.neutron [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 761.228505] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.228829] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 761.229025] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.389422] env[68285]: DEBUG nova.policy [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb08b9cb707a4ba19622181d7019146e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47bd2fd2c1f743e8a6a82e64a7c834e9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 761.452205] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891028, 'name': MoveDatastoreFile_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.558852] env[68285]: DEBUG nova.network.neutron [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.703936] env[68285]: DEBUG nova.compute.manager [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 761.958495] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891028, 'name': MoveDatastoreFile_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.985550] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d232e0-280a-495e-abfb-a9908b94f852 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.998539] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b4b7fc-a0a8-4bb8-bd0a-b693e539426b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.046922] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144c6a5d-7bae-4fa7-865e-9c289d325f63 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.062676] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11736988-203b-4c31-b1e4-5b00fbfa7793 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.085482] env[68285]: DEBUG nova.compute.provider_tree [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.116786] env[68285]: DEBUG nova.network.neutron [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating instance_info_cache with network_info: [{"id": "84cbe58d-a7c4-4c42-9f87-9a6b62805b10", "address": "fa:16:3e:f3:99:c3", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84cbe58d-a7", "ovs_interfaceid": "84cbe58d-a7c4-4c42-9f87-9a6b62805b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.458053] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 
tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891028, 'name': MoveDatastoreFile_Task, 'duration_secs': 1.061467} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.458467] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] File moved {{(pid=68285) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 762.458597] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Cleaning up location [datastore2] vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 762.458819] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Deleting the datastore file [datastore2] vmware_temp/4f6909b3-e315-4a66-b41f-fda46a07472a {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 762.459193] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8da68c86-77df-4ec1-aa26-79aafe8f86d3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.469269] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for the task: (returnval){ [ 762.469269] env[68285]: value = "task-2891029" [ 762.469269] env[68285]: _type = "Task" [ 762.469269] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.483481] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891029, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.520281] env[68285]: DEBUG nova.network.neutron [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Successfully updated port: ae1e3da0-addf-4feb-83f8-8a52e6a74a39 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 762.588969] env[68285]: DEBUG nova.scheduler.client.report [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 762.623078] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Releasing lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.623078] env[68285]: DEBUG nova.compute.manager [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Instance network_info: |[{"id": "84cbe58d-a7c4-4c42-9f87-9a6b62805b10", "address": "fa:16:3e:f3:99:c3", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84cbe58d-a7", "ovs_interfaceid": "84cbe58d-a7c4-4c42-9f87-9a6b62805b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 762.623564] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:99:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '84cbe58d-a7c4-4c42-9f87-9a6b62805b10', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 762.635376] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Creating folder: Project (f891a62d3df3400fa53ac94230bcb8a9). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 762.635952] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f95f8626-282b-4467-a147-ef87d456f1bf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.656050] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Created folder: Project (f891a62d3df3400fa53ac94230bcb8a9) in parent group-v580775. [ 762.657764] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Creating folder: Instances. Parent ref: group-v580794. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 762.657764] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2df3a10a-34cc-4e5c-8f33-6aaf5dcc1656 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.663021] env[68285]: DEBUG nova.network.neutron [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Successfully created port: a9adbb18-e996-4b1b-af89-73aa9fe32c71 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 762.680058] env[68285]: DEBUG nova.compute.manager [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Received event network-changed-9f5b021e-af4a-40de-ac20-e018f2923ae7 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 762.680274] env[68285]: DEBUG nova.compute.manager [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Refreshing instance network info cache due to event network-changed-9f5b021e-af4a-40de-ac20-e018f2923ae7. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 762.680651] env[68285]: DEBUG oslo_concurrency.lockutils [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] Acquiring lock "refresh_cache-f26a5b02-c71f-4f04-a8b2-4e284a6e37a6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.680721] env[68285]: DEBUG oslo_concurrency.lockutils [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] Acquired lock "refresh_cache-f26a5b02-c71f-4f04-a8b2-4e284a6e37a6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.680864] env[68285]: DEBUG nova.network.neutron [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Refreshing network info cache for port 9f5b021e-af4a-40de-ac20-e018f2923ae7 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 762.687728] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Created folder: Instances in parent group-v580794. [ 762.688109] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 762.688527] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 762.688753] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9751d77-3d89-41dd-b3e6-dabe2870b233 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.720202] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 762.720202] env[68285]: value = "task-2891032" [ 762.720202] env[68285]: _type = "Task" [ 762.720202] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.720842] env[68285]: DEBUG nova.compute.manager [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 762.736025] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Acquiring lock "9f4b2b94-ec19-4a8e-8663-ab71c417d093" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.736025] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Lock "9f4b2b94-ec19-4a8e-8663-ab71c417d093" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.738592] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891032, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.772099] env[68285]: DEBUG nova.virt.hardware [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 762.772448] env[68285]: DEBUG nova.virt.hardware [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 762.772676] env[68285]: DEBUG nova.virt.hardware [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 762.772939] env[68285]: DEBUG nova.virt.hardware [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 762.773167] env[68285]: DEBUG nova.virt.hardware [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 
tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 762.773380] env[68285]: DEBUG nova.virt.hardware [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 762.773701] env[68285]: DEBUG nova.virt.hardware [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 762.773925] env[68285]: DEBUG nova.virt.hardware [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 762.774192] env[68285]: DEBUG nova.virt.hardware [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 762.774435] env[68285]: DEBUG nova.virt.hardware [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 762.774717] env[68285]: DEBUG nova.virt.hardware [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 762.777362] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1ee8bc-d600-4f3c-932c-9c4e268a9687 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.793822] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f505d4-0caf-454e-993f-d3b3c83c798f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.987838] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891029, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034156} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.988114] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 762.988837] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edeb5a2a-1a44-4811-99fe-dbf660add94b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.998051] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for the task: (returnval){ [ 762.998051] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c7c066-c6f7-9587-ea10-3be396009c76" [ 762.998051] env[68285]: _type = "Task" [ 762.998051] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.014057] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c7c066-c6f7-9587-ea10-3be396009c76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.024451] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "refresh_cache-52fbfbe4-1807-4d6d-9139-ebe30e6bf647" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.024533] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquired lock "refresh_cache-52fbfbe4-1807-4d6d-9139-ebe30e6bf647" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.024767] env[68285]: DEBUG nova.network.neutron [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.098217] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.417s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.098732] env[68285]: DEBUG nova.compute.manager [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 763.102150] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.738s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.237714] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891032, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.614523] env[68285]: DEBUG nova.compute.utils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 763.622160] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c7c066-c6f7-9587-ea10-3be396009c76, 'name': SearchDatastore_Task, 'duration_secs': 0.027651} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.622954] env[68285]: DEBUG nova.compute.manager [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 763.623781] env[68285]: DEBUG nova.network.neutron [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 763.625605] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.626700] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 682c3b6e-a605-486a-86c8-af173d80cbcf/682c3b6e-a605-486a-86c8-af173d80cbcf.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 763.626700] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.627497] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 763.627497] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc7dc639-27b6-4273-b283-5bdda8979a52 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.634148] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b73f3447-8457-40c7-8285-1323e826f67b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.645262] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for the task: (returnval){ [ 763.645262] env[68285]: value = "task-2891033" [ 763.645262] env[68285]: _type = "Task" [ 763.645262] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.656673] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 763.656673] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 763.659311] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af32e61a-e113-416e-b765-06e0bb69d6d2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.665497] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891033, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.671794] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Waiting for the task: (returnval){ [ 763.671794] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52429419-76af-cc10-dbdf-150b590c9a68" [ 763.671794] env[68285]: _type = "Task" [ 763.671794] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.683386] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52429419-76af-cc10-dbdf-150b590c9a68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.737168] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891032, 'name': CreateVM_Task, 'duration_secs': 0.995974} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.737168] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 763.737168] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.737168] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.737168] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 763.737631] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa629c1f-5519-4f2e-9327-25da92c0e0ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.751297] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 763.751297] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f496ea-764a-daea-3650-a38302dfc8b1" [ 763.751297] env[68285]: _type = "Task" [ 763.751297] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.773058] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f496ea-764a-daea-3650-a38302dfc8b1, 'name': SearchDatastore_Task, 'duration_secs': 0.010535} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.774785] env[68285]: DEBUG nova.network.neutron [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.777258] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.777711] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 763.778045] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.803456] env[68285]: DEBUG nova.policy [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91e07beda91348ff873672d51166c05e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53261bb9432948b58692227101a4717b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 763.860435] env[68285]: DEBUG nova.network.neutron [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Updated VIF entry in instance network info cache for port 9f5b021e-af4a-40de-ac20-e018f2923ae7. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 763.860967] env[68285]: DEBUG nova.network.neutron [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Updating instance_info_cache with network_info: [{"id": "9f5b021e-af4a-40de-ac20-e018f2923ae7", "address": "fa:16:3e:60:f5:49", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5b021e-af", "ovs_interfaceid": "9f5b021e-af4a-40de-ac20-e018f2923ae7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.123746] env[68285]: DEBUG nova.compute.manager [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 764.163634] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891033, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.166080] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance e28d0927-17c2-4256-93d4-ef0cc2c9b92a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.166080] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance ef0636f4-3149-44e8-a4a3-62b9ede5dc28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.166080] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 105f0ad6-1591-40b9-997c-280860bd6501 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.166080] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.166480] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 682c3b6e-a605-486a-86c8-af173d80cbcf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.166480] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance f26a5b02-c71f-4f04-a8b2-4e284a6e37a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.166598] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance a97df3d2-c182-46d8-95c2-61caccade285 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.166793] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 52fbfbe4-1807-4d6d-9139-ebe30e6bf647 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.166916] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance fe9a8a13-73ec-4556-a62c-cc49fd01f539 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.167434] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 11de7da5-1d73-4536-b2a1-f7dbbdec14b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.187879] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52429419-76af-cc10-dbdf-150b590c9a68, 'name': SearchDatastore_Task, 'duration_secs': 0.013051} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.188804] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f974bf9-d93a-4587-b2f9-ad8fb1e1d59b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.200450] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Waiting for the task: (returnval){ [ 764.200450] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f47748-e629-bbf9-f4df-949ad4f73aae" [ 764.200450] env[68285]: _type = "Task" [ 764.200450] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.218437] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f47748-e629-bbf9-f4df-949ad4f73aae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.370285] env[68285]: DEBUG oslo_concurrency.lockutils [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] Releasing lock "refresh_cache-f26a5b02-c71f-4f04-a8b2-4e284a6e37a6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.370285] env[68285]: DEBUG nova.compute.manager [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Received event network-vif-plugged-84cbe58d-a7c4-4c42-9f87-9a6b62805b10 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 764.370285] env[68285]: DEBUG oslo_concurrency.lockutils [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] Acquiring lock "a97df3d2-c182-46d8-95c2-61caccade285-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.370285] env[68285]: DEBUG oslo_concurrency.lockutils [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] Lock "a97df3d2-c182-46d8-95c2-61caccade285-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.370285] env[68285]: DEBUG oslo_concurrency.lockutils [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] Lock "a97df3d2-c182-46d8-95c2-61caccade285-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.370474] env[68285]: DEBUG nova.compute.manager [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] [instance: a97df3d2-c182-46d8-95c2-61caccade285] No waiting events found dispatching network-vif-plugged-84cbe58d-a7c4-4c42-9f87-9a6b62805b10 {{(pid=68285) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 764.370474] env[68285]: WARNING nova.compute.manager [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Received unexpected event network-vif-plugged-84cbe58d-a7c4-4c42-9f87-9a6b62805b10 for instance with vm_state building and task_state spawning. [ 764.370474] env[68285]: DEBUG nova.compute.manager [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Received event network-changed-84cbe58d-a7c4-4c42-9f87-9a6b62805b10 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 764.370474] env[68285]: DEBUG nova.compute.manager [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Refreshing instance network info cache due to event network-changed-84cbe58d-a7c4-4c42-9f87-9a6b62805b10. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 764.370474] env[68285]: DEBUG oslo_concurrency.lockutils [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] Acquiring lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.370634] env[68285]: DEBUG oslo_concurrency.lockutils [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] Acquired lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.370634] env[68285]: DEBUG nova.network.neutron [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Refreshing network info cache for port 84cbe58d-a7c4-4c42-9f87-9a6b62805b10 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 764.437522] env[68285]: DEBUG nova.network.neutron [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Updating instance_info_cache with network_info: [{"id": "ae1e3da0-addf-4feb-83f8-8a52e6a74a39", "address": "fa:16:3e:94:fb:43", "network": {"id": "d9bb1a73-a8c2-4023-87f2-76bdb79f714a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-108279850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb8e48ceae0748b0b8c762ab7303a4b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae1e3da0-ad", "ovs_interfaceid": "ae1e3da0-addf-4feb-83f8-8a52e6a74a39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.670185] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891033, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.675211] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance ec89a2a4-3bfc-45c5-b7f2-239b52995d6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.713842] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f47748-e629-bbf9-f4df-949ad4f73aae, 'name': SearchDatastore_Task, 'duration_secs': 0.015773} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.716224] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.716224] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] f26a5b02-c71f-4f04-a8b2-4e284a6e37a6/f26a5b02-c71f-4f04-a8b2-4e284a6e37a6.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 764.716404] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.716573] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.716746] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45860ec8-945d-4870-b2ab-286580ca2f9c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
764.723358] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f989aa1-9d4f-4d4f-9a31-5a6d189b64e0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.732809] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Waiting for the task: (returnval){ [ 764.732809] env[68285]: value = "task-2891034" [ 764.732809] env[68285]: _type = "Task" [ 764.732809] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.737998] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.738896] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 764.739888] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ead429c-4c85-44a2-aaab-2c01e73b59e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.755086] env[68285]: DEBUG nova.compute.manager [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 764.755436] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891034, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.758997] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7cf6b2-b163-43b9-9830-27462dac591b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.763158] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 764.763158] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523b13e9-83d6-2718-c43f-2889b5bb64a2" [ 764.763158] env[68285]: _type = "Task" [ 764.763158] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.776663] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523b13e9-83d6-2718-c43f-2889b5bb64a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.832394] env[68285]: DEBUG nova.network.neutron [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Successfully updated port: a9adbb18-e996-4b1b-af89-73aa9fe32c71 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 764.915861] env[68285]: DEBUG nova.network.neutron [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Successfully created port: 3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 764.942678] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Releasing lock "refresh_cache-52fbfbe4-1807-4d6d-9139-ebe30e6bf647" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.943313] env[68285]: DEBUG nova.compute.manager [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Instance network_info: |[{"id": "ae1e3da0-addf-4feb-83f8-8a52e6a74a39", "address": "fa:16:3e:94:fb:43", "network": {"id": "d9bb1a73-a8c2-4023-87f2-76bdb79f714a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-108279850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb8e48ceae0748b0b8c762ab7303a4b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae1e3da0-ad", "ovs_interfaceid": "ae1e3da0-addf-4feb-83f8-8a52e6a74a39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 764.943841] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:fb:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '713e54d5-283f-493d-b003-f13182deaf7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae1e3da0-addf-4feb-83f8-8a52e6a74a39', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 764.953795] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Creating folder: Project (eb8e48ceae0748b0b8c762ab7303a4b7). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 764.955242] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13e5092a-eb09-44c7-9e52-8cc571603734 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.973435] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Created folder: Project (eb8e48ceae0748b0b8c762ab7303a4b7) in parent group-v580775. [ 764.973435] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Creating folder: Instances. Parent ref: group-v580797. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 764.973435] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0be0c54d-c055-4401-b515-347b6b32654c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.987037] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Created folder: Instances in parent group-v580797. [ 764.987324] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 764.987585] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 764.987750] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-895c6842-4c8f-47ae-84d5-401d2c04a567 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.018406] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 765.018406] env[68285]: value = "task-2891037" [ 765.018406] env[68285]: _type = "Task" [ 765.018406] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.031069] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891037, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.139810] env[68285]: DEBUG nova.compute.manager [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 765.169348] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891033, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.177162] env[68285]: DEBUG nova.virt.hardware [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 765.177380] env[68285]: DEBUG nova.virt.hardware [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 765.177696] env[68285]: DEBUG nova.virt.hardware [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 765.178026] env[68285]: DEBUG nova.virt.hardware [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 765.178333] env[68285]: DEBUG nova.virt.hardware [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 765.178596] env[68285]: DEBUG nova.virt.hardware [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 765.178999] env[68285]: DEBUG nova.virt.hardware [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 765.179310] env[68285]: DEBUG nova.virt.hardware [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 765.179623] env[68285]: DEBUG nova.virt.hardware [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 765.179905] env[68285]: DEBUG nova.virt.hardware [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 765.180218] env[68285]: DEBUG nova.virt.hardware [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 765.181214] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379607ac-780e-4154-bc62-0778a282cbe9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.184532] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 60144efd-061e-4144-9541-b2321c9b0ec1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 765.193671] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6b1f3a-1df8-4af7-830c-488b37b33665 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.245899] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891034, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.275136] env[68285]: INFO nova.compute.manager [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] instance snapshotting [ 765.276772] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523b13e9-83d6-2718-c43f-2889b5bb64a2, 'name': SearchDatastore_Task, 'duration_secs': 0.026574} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.277854] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56402492-3c33-47a0-98f5-c2afa4f1c503 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.280692] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de60882-9c75-47bf-934f-d0b81fdfd242 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.302880] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8eaffa1-9083-4d50-811c-0b56d22a7ae1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.306107] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 765.306107] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52dc1154-72fb-f021-70cb-95970086ea5b" [ 765.306107] env[68285]: _type = "Task" [ 765.306107] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.321303] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52dc1154-72fb-f021-70cb-95970086ea5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.327981] env[68285]: DEBUG nova.network.neutron [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updated VIF entry in instance network info cache for port 84cbe58d-a7c4-4c42-9f87-9a6b62805b10. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 765.327981] env[68285]: DEBUG nova.network.neutron [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating instance_info_cache with network_info: [{"id": "84cbe58d-a7c4-4c42-9f87-9a6b62805b10", "address": "fa:16:3e:f3:99:c3", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84cbe58d-a7", "ovs_interfaceid": "84cbe58d-a7c4-4c42-9f87-9a6b62805b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.338649] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "refresh_cache-fe9a8a13-73ec-4556-a62c-cc49fd01f539" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.338649] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquired lock "refresh_cache-fe9a8a13-73ec-4556-a62c-cc49fd01f539" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.338649] env[68285]: DEBUG nova.network.neutron [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 765.464986] env[68285]: DEBUG oslo_concurrency.lockutils [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Acquiring lock "105f0ad6-1591-40b9-997c-280860bd6501" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.464986] env[68285]: DEBUG oslo_concurrency.lockutils [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Lock "105f0ad6-1591-40b9-997c-280860bd6501" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 
0.002s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.464986] env[68285]: DEBUG oslo_concurrency.lockutils [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Acquiring lock "105f0ad6-1591-40b9-997c-280860bd6501-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.464986] env[68285]: DEBUG oslo_concurrency.lockutils [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Lock "105f0ad6-1591-40b9-997c-280860bd6501-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.466211] env[68285]: DEBUG oslo_concurrency.lockutils [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Lock "105f0ad6-1591-40b9-997c-280860bd6501-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.466824] env[68285]: INFO nova.compute.manager [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Terminating instance [ 765.537100] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891037, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.669343] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891033, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.5925} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.669655] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 682c3b6e-a605-486a-86c8-af173d80cbcf/682c3b6e-a605-486a-86c8-af173d80cbcf.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 765.669923] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 765.670310] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71420f1f-f89a-472c-8950-ac4a0a72ca18 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.689503] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 5b58896c-cb07-48c8-ace0-385486a3e19d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 765.693501] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for the task: (returnval){ [ 765.693501] env[68285]: value = "task-2891038" [ 765.693501] env[68285]: _type = "Task" [ 765.693501] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.708122] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891038, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.749457] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891034, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.819525] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 765.819900] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-32888f78-b398-4668-b0be-54f2ddb4e014 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.834371] env[68285]: DEBUG oslo_concurrency.lockutils [req-360e8f49-31dc-4933-8447-bcc333cb7a89 req-1c4d98d8-9b5a-4bd1-8f75-a0408466b0d1 service nova] Releasing lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.834917] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52dc1154-72fb-f021-70cb-95970086ea5b, 'name': SearchDatastore_Task, 'duration_secs': 0.035188} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.837835] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.837835] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] a97df3d2-c182-46d8-95c2-61caccade285/a97df3d2-c182-46d8-95c2-61caccade285.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 765.837835] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 765.837835] env[68285]: value = "task-2891039" [ 765.837835] env[68285]: _type = "Task" [ 765.837835] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.841213] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-90d957f3-373c-45a0-955b-cb278692e32d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.853263] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891039, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.855129] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 765.855129] env[68285]: value = "task-2891040" [ 765.855129] env[68285]: _type = "Task" [ 765.855129] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.867826] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891040, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.879063] env[68285]: DEBUG nova.compute.manager [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Received event network-vif-plugged-ae1e3da0-addf-4feb-83f8-8a52e6a74a39 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 765.879298] env[68285]: DEBUG oslo_concurrency.lockutils [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] Acquiring lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.879505] env[68285]: DEBUG oslo_concurrency.lockutils [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] Lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.879750] env[68285]: DEBUG oslo_concurrency.lockutils [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] Lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.879856] env[68285]: DEBUG nova.compute.manager [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] No waiting events found dispatching network-vif-plugged-ae1e3da0-addf-4feb-83f8-8a52e6a74a39 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 765.880051] env[68285]: WARNING nova.compute.manager [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Received unexpected event network-vif-plugged-ae1e3da0-addf-4feb-83f8-8a52e6a74a39 for instance with vm_state building and task_state spawning. 
[ 765.880216] env[68285]: DEBUG nova.compute.manager [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Received event network-changed-56efc1e7-b396-4ba4-8104-803f5f018f35 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 765.880363] env[68285]: DEBUG nova.compute.manager [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Refreshing instance network info cache due to event network-changed-56efc1e7-b396-4ba4-8104-803f5f018f35. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 765.880564] env[68285]: DEBUG oslo_concurrency.lockutils [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] Acquiring lock "refresh_cache-e28d0927-17c2-4256-93d4-ef0cc2c9b92a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.880699] env[68285]: DEBUG oslo_concurrency.lockutils [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] Acquired lock "refresh_cache-e28d0927-17c2-4256-93d4-ef0cc2c9b92a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.880863] env[68285]: DEBUG nova.network.neutron [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Refreshing network info cache for port 56efc1e7-b396-4ba4-8104-803f5f018f35 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 765.940192] env[68285]: DEBUG nova.network.neutron [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.975898] env[68285]: DEBUG nova.compute.manager [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 765.976927] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 765.977168] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea801ad5-bb25-49ea-97d5-b25fef64bc02 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.987611] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 765.987836] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-719763e6-a17d-45df-a30b-06a85940db1f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.997787] env[68285]: DEBUG oslo_vmware.api [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Waiting for the task: (returnval){ [ 765.997787] env[68285]: value = "task-2891041" [ 765.997787] env[68285]: _type = "Task" [ 765.997787] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.008324] env[68285]: DEBUG oslo_vmware.api [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Task: {'id': task-2891041, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.032874] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891037, 'name': CreateVM_Task, 'duration_secs': 0.656682} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.033181] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 766.034368] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.034368] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.034606] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 766.035412] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c375f89f-bd40-4270-bde8-e46cede94d66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.042974] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 766.042974] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52706dc2-8a8e-8bbe-9363-6d8e0203e8a0" [ 766.042974] env[68285]: _type = "Task" [ 766.042974] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.053205] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52706dc2-8a8e-8bbe-9363-6d8e0203e8a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.197675] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 12fad42a-1011-4563-b11f-7b141b2a1670 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 766.215106] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891038, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184256} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.215106] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 766.216920] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b22e4db-e691-4d76-a1c2-f045642fd9ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.247026] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] 682c3b6e-a605-486a-86c8-af173d80cbcf/682c3b6e-a605-486a-86c8-af173d80cbcf.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 766.253572] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28a280de-94d5-4b4a-81f9-8a3e307f82c2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.274158] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.274405] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.285933] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891034, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.132792} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.287192] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] f26a5b02-c71f-4f04-a8b2-4e284a6e37a6/f26a5b02-c71f-4f04-a8b2-4e284a6e37a6.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 766.288102] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 766.288565] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for the task: (returnval){ [ 766.288565] env[68285]: value = "task-2891042" [ 766.288565] env[68285]: _type = "Task" [ 766.288565] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.288921] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d72021a2-accf-44e8-a23e-6e0f48b1cf5f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.304398] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891042, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.306430] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Waiting for the task: (returnval){ [ 766.306430] env[68285]: value = "task-2891043" [ 766.306430] env[68285]: _type = "Task" [ 766.306430] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.318767] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891043, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.354250] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891039, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.368064] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891040, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513304} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.368064] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] a97df3d2-c182-46d8-95c2-61caccade285/a97df3d2-c182-46d8-95c2-61caccade285.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 766.368064] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 766.370039] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9e6257ce-542d-499d-aa51-135939e2e6a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.373224] env[68285]: DEBUG nova.network.neutron [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Updating instance_info_cache with network_info: [{"id": "a9adbb18-e996-4b1b-af89-73aa9fe32c71", "address": "fa:16:3e:71:00:ee", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9adbb18-e9", "ovs_interfaceid": "a9adbb18-e996-4b1b-af89-73aa9fe32c71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.383302] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 766.383302] env[68285]: value = "task-2891044" [ 766.383302] env[68285]: _type = "Task" [ 766.383302] env[68285]: } to 
complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.403771] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891044, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.515838] env[68285]: DEBUG oslo_vmware.api [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Task: {'id': task-2891041, 'name': PowerOffVM_Task, 'duration_secs': 0.271419} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.517421] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 766.517421] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 766.517421] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b75759a6-8816-4504-b737-497c0dbed6f0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.554754] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52706dc2-8a8e-8bbe-9363-6d8e0203e8a0, 'name': SearchDatastore_Task, 'duration_secs': 0.073696} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.555064] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.555293] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 766.555531] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.555670] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.556429] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 766.557107] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3ec6a16-3197-407b-a988-65afb5be6836 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.566933] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 766.567381] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 766.567969] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-468e4058-85d6-4a02-b542-171859cb3566 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.574526] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 766.574526] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52754f9d-0e49-de90-50d4-3972fb0678dd" [ 766.574526] env[68285]: _type = "Task" [ 766.574526] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.585877] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52754f9d-0e49-de90-50d4-3972fb0678dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.587902] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 766.588128] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 766.588521] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Deleting the datastore file [datastore1] 105f0ad6-1591-40b9-997c-280860bd6501 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 766.588610] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-281786a7-3dff-48f3-a1b6-0bdb1ebb9ff0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.596664] env[68285]: DEBUG oslo_vmware.api [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Waiting for the task: (returnval){ [ 766.596664] env[68285]: value = "task-2891046" [ 766.596664] env[68285]: _type = "Task" [ 766.596664] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.611915] env[68285]: DEBUG oslo_vmware.api [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Task: {'id': task-2891046, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.706107] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 87582063-50f9-4518-ad2d-915c9cd49b19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 766.801614] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891042, 'name': ReconfigVM_Task, 'duration_secs': 0.36409} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.802065] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Reconfigured VM instance instance-00000005 to attach disk [datastore2] 682c3b6e-a605-486a-86c8-af173d80cbcf/682c3b6e-a605-486a-86c8-af173d80cbcf.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 766.802705] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ca95533-217a-4b23-86c8-5c59d9e474db {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.828018] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for the task: (returnval){ [ 766.828018] env[68285]: value = "task-2891047" [ 766.828018] env[68285]: _type = "Task" [ 766.828018] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.832848] env[68285]: DEBUG nova.network.neutron [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Updated VIF entry in instance network info cache for port 56efc1e7-b396-4ba4-8104-803f5f018f35. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 766.833518] env[68285]: DEBUG nova.network.neutron [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Updating instance_info_cache with network_info: [{"id": "56efc1e7-b396-4ba4-8104-803f5f018f35", "address": "fa:16:3e:b6:95:74", "network": {"id": "8cd87e17-9031-47ac-b8d9-60c9036412d9", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-424993255-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd008f6669ed4e65919a8125d2ba8d2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56efc1e7-b3", "ovs_interfaceid": "56efc1e7-b396-4ba4-8104-803f5f018f35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.835138] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891043, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098739} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.840347] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 766.840347] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccad3a0-0f1b-4eb3-968f-6402e5b924ef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.852038] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891047, 'name': Rename_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.870521] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] f26a5b02-c71f-4f04-a8b2-4e284a6e37a6/f26a5b02-c71f-4f04-a8b2-4e284a6e37a6.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 766.875093] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80a83a4e-a11a-4902-b330-48eb7382fb16 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.890251] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891039, 'name': CreateSnapshot_Task, 'duration_secs': 0.693779} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.890971] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Releasing lock "refresh_cache-fe9a8a13-73ec-4556-a62c-cc49fd01f539" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.890971] env[68285]: DEBUG nova.compute.manager [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Instance network_info: |[{"id": "a9adbb18-e996-4b1b-af89-73aa9fe32c71", "address": "fa:16:3e:71:00:ee", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9adbb18-e9", "ovs_interfaceid": "a9adbb18-e996-4b1b-af89-73aa9fe32c71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 766.891312] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 766.891720] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:00:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9adbb18-e996-4b1b-af89-73aa9fe32c71', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 766.903172] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Creating folder: Project (47bd2fd2c1f743e8a6a82e64a7c834e9). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 766.907198] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30b7ffd-d72a-4e11-b402-f86b57e8f451 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.910281] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f9b318f-0f63-479a-bfc8-6edb5e5a85b0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.913912] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Waiting for the task: (returnval){ [ 766.913912] env[68285]: value = "task-2891048" [ 766.913912] env[68285]: _type = "Task" [ 766.913912] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.926745] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891044, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07419} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.928831] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 766.929166] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Created folder: Project (47bd2fd2c1f743e8a6a82e64a7c834e9) in parent group-v580775. [ 766.929336] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Creating folder: Instances. Parent ref: group-v580801. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 766.930100] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc81e930-5716-4382-b575-236b7e6702f6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.934181] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da14125b-2c12-480b-ad7b-ee07b1d35757 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.939116] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891048, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.961372] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] a97df3d2-c182-46d8-95c2-61caccade285/a97df3d2-c182-46d8-95c2-61caccade285.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 766.964242] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-168bd674-7120-4e97-9fd7-cb66e586dcbc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.977421] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Created folder: Instances in parent group-v580801. [ 766.977722] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 766.978697] env[68285]: DEBUG nova.network.neutron [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Successfully updated port: 3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 766.979854] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 766.981739] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d625ce1-34d1-4d20-940d-92f99f2c6e9a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.998921] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 766.998921] env[68285]: value = "task-2891051" [ 766.998921] env[68285]: _type = "Task" [ 766.998921] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.005398] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 767.005398] env[68285]: value = "task-2891052" [ 767.005398] env[68285]: _type = "Task" [ 767.005398] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.009417] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.018685] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891052, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.087331] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52754f9d-0e49-de90-50d4-3972fb0678dd, 'name': SearchDatastore_Task, 'duration_secs': 0.01171} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.088171] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d828256-e7ec-4023-9276-fe48523b230c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.094653] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 767.094653] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52631aaa-bdd0-cc29-5ef1-4a7ad653292e" [ 767.094653] env[68285]: _type = "Task" [ 767.094653] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.110536] env[68285]: DEBUG oslo_vmware.api [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Task: {'id': task-2891046, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144397} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.110809] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52631aaa-bdd0-cc29-5ef1-4a7ad653292e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.111135] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 767.111278] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 767.111413] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 767.111584] env[68285]: INFO nova.compute.manager [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Took 1.14 seconds to destroy the instance on the hypervisor. [ 767.111822] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 767.112011] env[68285]: DEBUG nova.compute.manager [-] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 767.112181] env[68285]: DEBUG nova.network.neutron [-] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 767.210078] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 9f4b2b94-ec19-4a8e-8663-ab71c417d093 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 767.210344] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 767.210519] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 767.340529] env[68285]: DEBUG oslo_concurrency.lockutils [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] Releasing lock "refresh_cache-e28d0927-17c2-4256-93d4-ef0cc2c9b92a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.340836] env[68285]: DEBUG nova.compute.manager [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Received event network-changed-ae1e3da0-addf-4feb-83f8-8a52e6a74a39 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 767.341157] env[68285]: DEBUG nova.compute.manager [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Refreshing instance network info cache due to event network-changed-ae1e3da0-addf-4feb-83f8-8a52e6a74a39. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 767.341395] env[68285]: DEBUG oslo_concurrency.lockutils [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] Acquiring lock "refresh_cache-52fbfbe4-1807-4d6d-9139-ebe30e6bf647" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.341577] env[68285]: DEBUG oslo_concurrency.lockutils [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] Acquired lock "refresh_cache-52fbfbe4-1807-4d6d-9139-ebe30e6bf647" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.341646] env[68285]: DEBUG nova.network.neutron [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Refreshing network info cache for port ae1e3da0-addf-4feb-83f8-8a52e6a74a39 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 767.352119] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891047, 'name': Rename_Task, 'duration_secs': 0.170186} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.352119] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 767.352421] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-375b694c-9203-4eae-84cf-20d82b71fecc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.366160] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for the task: (returnval){ [ 767.366160] env[68285]: value = "task-2891053" [ 767.366160] env[68285]: _type = "Task" [ 767.366160] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.376713] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891053, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.448330] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 767.449050] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891048, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.451884] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-239bca8d-1400-4618-bb00-cbc63ddd6360 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.461876] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 767.461876] env[68285]: value = "task-2891054" [ 767.461876] env[68285]: _type = "Task" [ 767.461876] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.473134] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891054, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.482977] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.483742] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquired lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.483742] env[68285]: DEBUG nova.network.neutron [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 767.517173] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891051, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.532854] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891052, 'name': CreateVM_Task, 'duration_secs': 0.397414} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.533053] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 767.533801] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.534117] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.534261] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 767.534524] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f99bae97-e3c0-459c-a4fc-2d5875f207bb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.542622] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 767.542622] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52599c3f-41bb-58bb-63f1-d31c05958ccd" [ 767.542622] env[68285]: _type = "Task" [ 767.542622] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.554288] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52599c3f-41bb-58bb-63f1-d31c05958ccd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.576967] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8285158b-f8df-4faf-9050-6c660cb30b14 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.589625] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f34889b0-4e15-440d-b60b-15dd4c7ea459 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.628973] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a8e837-e944-4101-9890-8d656e37b7a3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.635882] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52631aaa-bdd0-cc29-5ef1-4a7ad653292e, 'name': SearchDatastore_Task, 'duration_secs': 0.045975} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.636576] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.636870] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 52fbfbe4-1807-4d6d-9139-ebe30e6bf647/52fbfbe4-1807-4d6d-9139-ebe30e6bf647.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 767.637179] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-750db919-b54c-47ce-9a20-4c391c803e61 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.643969] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117bdd90-e763-412a-8153-a885e29ad08c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.651302] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 767.651302] env[68285]: value = "task-2891055" [ 767.651302] env[68285]: _type = "Task" [ 767.651302] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.664184] env[68285]: DEBUG nova.compute.provider_tree [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.673968] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891055, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.884309] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891053, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.940143] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891048, 'name': ReconfigVM_Task, 'duration_secs': 0.635837} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.940143] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Reconfigured VM instance instance-00000006 to attach disk [datastore2] f26a5b02-c71f-4f04-a8b2-4e284a6e37a6/f26a5b02-c71f-4f04-a8b2-4e284a6e37a6.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 767.941526] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-600a5282-fd09-48b3-9425-a1685bcb26af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.956087] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Waiting for the task: (returnval){ [ 767.956087] env[68285]: value = "task-2891056" [ 767.956087] env[68285]: _type = "Task" [ 767.956087] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.970410] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891056, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.977634] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891054, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.014895] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "e3b01f87-6a4c-4127-9204-2bfa5ff28f38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.015069] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "e3b01f87-6a4c-4127-9204-2bfa5ff28f38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.029257] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891051, 'name': ReconfigVM_Task, 'duration_secs': 0.561849} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.029257] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Reconfigured VM instance instance-00000007 to attach disk [datastore2] a97df3d2-c182-46d8-95c2-61caccade285/a97df3d2-c182-46d8-95c2-61caccade285.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 768.029886] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41c77de8-10b8-4602-8b58-fd7e418caa4a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.042713] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 768.042713] env[68285]: value = "task-2891057" [ 768.042713] env[68285]: _type = "Task" [ 768.042713] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.059903] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891057, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.059903] env[68285]: DEBUG nova.network.neutron [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.065996] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52599c3f-41bb-58bb-63f1-d31c05958ccd, 'name': SearchDatastore_Task, 'duration_secs': 0.02571} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.066393] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.066670] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.066905] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.067167] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.067463] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 768.067889] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0c0a8f6-52f2-4372-b241-c2c50ea91434 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.085448] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 768.085697] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Folder [datastore2] devstack-image-cache_base 
created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 768.087174] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5abacb34-e87b-420a-9e14-c28698b3d695 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.101379] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 768.101379] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52bfaf21-41ac-e8d2-1325-e11ae4c8eef6" [ 768.101379] env[68285]: _type = "Task" [ 768.101379] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.117030] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bfaf21-41ac-e8d2-1325-e11ae4c8eef6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.119575] env[68285]: DEBUG nova.compute.manager [req-6b5c3d4c-86fb-4774-b608-80624b0b32f0 req-978bb3bd-c8b6-4c81-86c6-3d34c3160de6 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Received event network-vif-plugged-3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 768.119575] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b5c3d4c-86fb-4774-b608-80624b0b32f0 req-978bb3bd-c8b6-4c81-86c6-3d34c3160de6 service nova] Acquiring lock "11de7da5-1d73-4536-b2a1-f7dbbdec14b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.119792] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b5c3d4c-86fb-4774-b608-80624b0b32f0 req-978bb3bd-c8b6-4c81-86c6-3d34c3160de6 service nova] Lock "11de7da5-1d73-4536-b2a1-f7dbbdec14b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.119946] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b5c3d4c-86fb-4774-b608-80624b0b32f0 req-978bb3bd-c8b6-4c81-86c6-3d34c3160de6 service nova] Lock "11de7da5-1d73-4536-b2a1-f7dbbdec14b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.120535] env[68285]: DEBUG nova.compute.manager [req-6b5c3d4c-86fb-4774-b608-80624b0b32f0 req-978bb3bd-c8b6-4c81-86c6-3d34c3160de6 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] No waiting events found dispatching network-vif-plugged-3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 768.120535] env[68285]: WARNING nova.compute.manager [req-6b5c3d4c-86fb-4774-b608-80624b0b32f0 req-978bb3bd-c8b6-4c81-86c6-3d34c3160de6 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Received unexpected event 
network-vif-plugged-3b795cd1-99e2-4a06-9607-e71ca33d19ff for instance with vm_state building and task_state spawning. [ 768.168207] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 768.172689] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891055, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.303199] env[68285]: DEBUG nova.network.neutron [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Updated VIF entry in instance network info cache for port ae1e3da0-addf-4feb-83f8-8a52e6a74a39. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 768.303642] env[68285]: DEBUG nova.network.neutron [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Updating instance_info_cache with network_info: [{"id": "ae1e3da0-addf-4feb-83f8-8a52e6a74a39", "address": "fa:16:3e:94:fb:43", "network": {"id": "d9bb1a73-a8c2-4023-87f2-76bdb79f714a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-108279850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb8e48ceae0748b0b8c762ab7303a4b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae1e3da0-ad", "ovs_interfaceid": "ae1e3da0-addf-4feb-83f8-8a52e6a74a39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.380482] env[68285]: DEBUG oslo_vmware.api [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891053, 'name': PowerOnVM_Task, 'duration_secs': 0.528668} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.383255] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 768.384917] env[68285]: INFO nova.compute.manager [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Took 16.41 seconds to spawn the instance on the hypervisor. [ 768.384917] env[68285]: DEBUG nova.compute.manager [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 768.384917] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626c5d76-8cd6-41dd-8bd5-8cdf6b077408 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.421849] env[68285]: DEBUG nova.network.neutron [-] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.469720] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891056, 'name': Rename_Task, 'duration_secs': 0.298445} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.473075] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 768.473436] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c33bb8c8-33d7-4b72-927d-294773b34194 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.481340] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891054, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.484122] env[68285]: DEBUG nova.network.neutron [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Updating instance_info_cache with network_info: [{"id": "3b795cd1-99e2-4a06-9607-e71ca33d19ff", "address": "fa:16:3e:75:c1:88", "network": {"id": "23f2c3a4-b609-4f2b-82ea-30d2f16df8e6", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2087738678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53261bb9432948b58692227101a4717b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b795cd1-99", "ovs_interfaceid": "3b795cd1-99e2-4a06-9607-e71ca33d19ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.485516] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Waiting for the task: (returnval){ [ 768.485516] env[68285]: value = "task-2891058" [ 768.485516] env[68285]: _type = "Task" [ 768.485516] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.495811] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891058, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.558020] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891057, 'name': Rename_Task, 'duration_secs': 0.273459} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.558020] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 768.558020] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9832e203-4fcd-41f0-b947-f24bdd399e63 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.566276] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 768.566276] env[68285]: value = "task-2891059" [ 768.566276] env[68285]: _type = "Task" [ 768.566276] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.575226] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891059, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.615180] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bfaf21-41ac-e8d2-1325-e11ae4c8eef6, 'name': SearchDatastore_Task, 'duration_secs': 0.061147} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.616067] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e86593c-c1d4-4a71-b6c8-9bd5e7671f14 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.624179] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 768.624179] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5205c1cf-e361-525c-cafa-decface869dc" [ 768.624179] env[68285]: _type = "Task" [ 768.624179] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.633685] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5205c1cf-e361-525c-cafa-decface869dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.665594] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891055, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608724} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.665862] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 52fbfbe4-1807-4d6d-9139-ebe30e6bf647/52fbfbe4-1807-4d6d-9139-ebe30e6bf647.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 768.666086] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 768.666720] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d276dc47-6709-4b86-8f24-3bb51010380d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.677071] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 768.678738] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.576s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.678738] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 768.678738] env[68285]: value = "task-2891060" [ 768.678738] env[68285]: _type = "Task" [ 768.678738] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.678738] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.911s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.679969] env[68285]: INFO nova.compute.claims [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 768.697199] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891060, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.807696] env[68285]: DEBUG oslo_concurrency.lockutils [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] Releasing lock "refresh_cache-52fbfbe4-1807-4d6d-9139-ebe30e6bf647" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.808026] env[68285]: DEBUG nova.compute.manager [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Received event network-vif-plugged-a9adbb18-e996-4b1b-af89-73aa9fe32c71 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 768.809415] env[68285]: DEBUG oslo_concurrency.lockutils [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] Acquiring lock "fe9a8a13-73ec-4556-a62c-cc49fd01f539-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.809415] env[68285]: DEBUG oslo_concurrency.lockutils [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] Lock "fe9a8a13-73ec-4556-a62c-cc49fd01f539-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.809415] env[68285]: DEBUG oslo_concurrency.lockutils [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] Lock "fe9a8a13-73ec-4556-a62c-cc49fd01f539-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.809627] env[68285]: DEBUG nova.compute.manager [req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] No waiting events found dispatching network-vif-plugged-a9adbb18-e996-4b1b-af89-73aa9fe32c71 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 768.809787] env[68285]: WARNING nova.compute.manager 
[req-2d423dcd-a176-4d02-8483-767e42b4fc43 req-1f0694ba-e3a3-4fd8-b10c-c2171fe5009c service nova] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Received unexpected event network-vif-plugged-a9adbb18-e996-4b1b-af89-73aa9fe32c71 for instance with vm_state building and task_state spawning. [ 768.908254] env[68285]: INFO nova.compute.manager [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Took 26.85 seconds to build instance. [ 768.928442] env[68285]: INFO nova.compute.manager [-] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Took 1.82 seconds to deallocate network for instance. [ 768.985952] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891054, 'name': CloneVM_Task} progress is 95%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.990357] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Releasing lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.990653] env[68285]: DEBUG nova.compute.manager [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Instance network_info: |[{"id": "3b795cd1-99e2-4a06-9607-e71ca33d19ff", "address": "fa:16:3e:75:c1:88", "network": {"id": "23f2c3a4-b609-4f2b-82ea-30d2f16df8e6", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2087738678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53261bb9432948b58692227101a4717b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b795cd1-99", "ovs_interfaceid": "3b795cd1-99e2-4a06-9607-e71ca33d19ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 768.994562] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:c1:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2c019b6-3ef3-4c8f-95bd-edede2c554a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': '3b795cd1-99e2-4a06-9607-e71ca33d19ff', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 769.009782] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Creating folder: Project (53261bb9432948b58692227101a4717b). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.015429] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2d34836-c566-49f9-b101-14642ba03a53 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.021740] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891058, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.035027] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Created folder: Project (53261bb9432948b58692227101a4717b) in parent group-v580775. [ 769.035407] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Creating folder: Instances. Parent ref: group-v580805. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.036454] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c09aec9-9edf-4be4-a0a3-e89aebaf4a4c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.053718] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Created folder: Instances in parent group-v580805. [ 769.053718] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 769.053971] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 769.054146] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-19127f4e-7ab1-4046-9900-44f6388ecb18 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.083707] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891059, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.085563] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 769.085563] env[68285]: value = "task-2891063" [ 769.085563] env[68285]: _type = "Task" [ 769.085563] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.095740] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891063, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.140047] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5205c1cf-e361-525c-cafa-decface869dc, 'name': SearchDatastore_Task, 'duration_secs': 0.011848} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.140047] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.140047] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] fe9a8a13-73ec-4556-a62c-cc49fd01f539/fe9a8a13-73ec-4556-a62c-cc49fd01f539.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 769.140047] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45799dc2-b7e7-4f65-9cf7-8a161ef009d9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.151538] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 769.151538] env[68285]: value = "task-2891064" [ 769.151538] env[68285]: _type = "Task" [ 769.151538] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.165125] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891064, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.201000] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891060, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.160779} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.202194] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 769.203135] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3b22e7-c84a-4d82-8589-3384a6433781 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.229325] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 52fbfbe4-1807-4d6d-9139-ebe30e6bf647/52fbfbe4-1807-4d6d-9139-ebe30e6bf647.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 769.230093] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-994fce79-bbe8-443d-8a41-b7ec83d0ada6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.251976] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 769.251976] env[68285]: value = "task-2891065" [ 769.251976] env[68285]: _type = "Task" [ 769.251976] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.267412] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891065, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.351637] env[68285]: DEBUG nova.compute.manager [req-f9155b02-8165-42fc-b8cb-1e3f6331db2e req-eaf6ced2-2511-4c8f-9695-2f00da3fd5bd service nova] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Received event network-changed-a9adbb18-e996-4b1b-af89-73aa9fe32c71 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 769.353240] env[68285]: DEBUG nova.compute.manager [req-f9155b02-8165-42fc-b8cb-1e3f6331db2e req-eaf6ced2-2511-4c8f-9695-2f00da3fd5bd service nova] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Refreshing instance network info cache due to event network-changed-a9adbb18-e996-4b1b-af89-73aa9fe32c71. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 769.353240] env[68285]: DEBUG oslo_concurrency.lockutils [req-f9155b02-8165-42fc-b8cb-1e3f6331db2e req-eaf6ced2-2511-4c8f-9695-2f00da3fd5bd service nova] Acquiring lock "refresh_cache-fe9a8a13-73ec-4556-a62c-cc49fd01f539" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.353240] env[68285]: DEBUG oslo_concurrency.lockutils [req-f9155b02-8165-42fc-b8cb-1e3f6331db2e req-eaf6ced2-2511-4c8f-9695-2f00da3fd5bd service nova] Acquired lock "refresh_cache-fe9a8a13-73ec-4556-a62c-cc49fd01f539" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.353457] env[68285]: DEBUG nova.network.neutron [req-f9155b02-8165-42fc-b8cb-1e3f6331db2e req-eaf6ced2-2511-4c8f-9695-2f00da3fd5bd service nova] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Refreshing network info cache for port a9adbb18-e996-4b1b-af89-73aa9fe32c71 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 769.411903] env[68285]: DEBUG oslo_concurrency.lockutils [None req-197fc6f7-0fa0-4992-958f-7304f09524c7 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Lock "682c3b6e-a605-486a-86c8-af173d80cbcf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.357s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.439827] env[68285]: DEBUG oslo_concurrency.lockutils [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 769.482321] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891054, 'name': CloneVM_Task} progress is 95%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.498754] env[68285]: DEBUG oslo_vmware.api [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891058, 'name': PowerOnVM_Task, 'duration_secs': 0.608342} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.498754] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 769.498986] env[68285]: INFO nova.compute.manager [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Took 15.18 seconds to spawn the instance on the hypervisor. 
[ 769.499044] env[68285]: DEBUG nova.compute.manager [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 769.499927] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209e5071-d10f-4ae1-aad3-2e3f7c0fb891 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.584461] env[68285]: DEBUG oslo_vmware.api [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891059, 'name': PowerOnVM_Task, 'duration_secs': 0.589979} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.585072] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 769.585285] env[68285]: INFO nova.compute.manager [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Took 11.68 seconds to spawn the instance on the hypervisor. [ 769.585473] env[68285]: DEBUG nova.compute.manager [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 769.586926] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2912137e-fa05-4ef4-b017-12ec03fbd31b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.611196] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891063, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.664521] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891064, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50924} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.664863] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] fe9a8a13-73ec-4556-a62c-cc49fd01f539/fe9a8a13-73ec-4556-a62c-cc49fd01f539.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 769.665147] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 769.665441] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-577fc147-9645-4316-8af0-a9ab2e2b7d5a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.675191] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 769.675191] env[68285]: value = "task-2891066" [ 769.675191] env[68285]: _type = "Task" [ 769.675191] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.685226] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891066, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.765871] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891065, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.917191] env[68285]: DEBUG nova.compute.manager [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 769.982554] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891054, 'name': CloneVM_Task} progress is 95%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.028095] env[68285]: INFO nova.compute.manager [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Took 27.37 seconds to build instance. 
[ 770.056622] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535d2a78-90c5-4719-9fe0-a6871b96d0c1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.068610] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453d7e27-ad79-4d29-aa16-ad18d199cb96 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.110272] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2384a997-f346-4d86-920c-09e90d49e9d9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.128832] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891063, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.129450] env[68285]: INFO nova.compute.manager [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Took 27.44 seconds to build instance. [ 770.131617] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83b89a1-aa27-4f71-841b-1f291500187c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.148437] env[68285]: DEBUG nova.compute.provider_tree [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.192300] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891066, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069968} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.192300] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 770.192300] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15fef111-c9a0-488b-abac-f36ebf33490e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.218867] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] fe9a8a13-73ec-4556-a62c-cc49fd01f539/fe9a8a13-73ec-4556-a62c-cc49fd01f539.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 770.219552] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29a355c1-ec85-48ee-b86c-afb9da904355 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.241192] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 770.241192] env[68285]: value = "task-2891067" [ 770.241192] env[68285]: _type = "Task" [ 770.241192] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.253128] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891067, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.263841] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891065, 'name': ReconfigVM_Task, 'duration_secs': 0.638796} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.264142] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 52fbfbe4-1807-4d6d-9139-ebe30e6bf647/52fbfbe4-1807-4d6d-9139-ebe30e6bf647.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 770.264918] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e682963-353a-4401-8fd4-5058f59d3a4b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.274461] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 770.274461] env[68285]: value = "task-2891068" [ 770.274461] env[68285]: _type = "Task" [ 770.274461] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.286947] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891068, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.295705] env[68285]: DEBUG nova.network.neutron [req-f9155b02-8165-42fc-b8cb-1e3f6331db2e req-eaf6ced2-2511-4c8f-9695-2f00da3fd5bd service nova] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Updated VIF entry in instance network info cache for port a9adbb18-e996-4b1b-af89-73aa9fe32c71. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 770.296154] env[68285]: DEBUG nova.network.neutron [req-f9155b02-8165-42fc-b8cb-1e3f6331db2e req-eaf6ced2-2511-4c8f-9695-2f00da3fd5bd service nova] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Updating instance_info_cache with network_info: [{"id": "a9adbb18-e996-4b1b-af89-73aa9fe32c71", "address": "fa:16:3e:71:00:ee", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9adbb18-e9", "ovs_interfaceid": "a9adbb18-e996-4b1b-af89-73aa9fe32c71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.446748] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.482815] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891054, 'name': CloneVM_Task, 'duration_secs': 2.869057} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.483251] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Created linked-clone VM from snapshot [ 770.484057] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7478696-4511-4923-b39e-792b7caeb315 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.494308] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Uploading image 40c3a6e3-79ad-4702-9fa4-d2bbe51f9e6f {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 770.525832] env[68285]: DEBUG oslo_vmware.rw_handles [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 770.525832] env[68285]: value = "vm-580804" [ 770.525832] env[68285]: _type = "VirtualMachine" [ 770.525832] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 770.526111] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-17e387ad-597f-4e30-ad29-e7bf31969cf6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.530019] env[68285]: DEBUG oslo_concurrency.lockutils [None req-818a4c7e-4ece-429b-9af4-9dff24efe813 tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Lock "f26a5b02-c71f-4f04-a8b2-4e284a6e37a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.883s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.536758] env[68285]: DEBUG oslo_vmware.rw_handles [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lease: (returnval){ [ 770.536758] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523d5b03-09e3-6265-b228-bdca3c7f45fd" [ 770.536758] env[68285]: _type = "HttpNfcLease" [ 770.536758] env[68285]: } obtained for exporting VM: (result){ [ 770.536758] env[68285]: value = "vm-580804" [ 770.536758] env[68285]: _type = "VirtualMachine" [ 770.536758] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 770.536985] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the lease: (returnval){ [ 770.536985] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523d5b03-09e3-6265-b228-bdca3c7f45fd" [ 770.536985] env[68285]: _type = "HttpNfcLease" [ 770.536985] env[68285]: } to be ready. 
{{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 770.545546] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 770.545546] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523d5b03-09e3-6265-b228-bdca3c7f45fd" [ 770.545546] env[68285]: _type = "HttpNfcLease" [ 770.545546] env[68285]: } is initializing. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 770.620177] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891063, 'name': CreateVM_Task, 'duration_secs': 1.457256} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.620406] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 770.621224] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.621412] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.621805] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 770.621962] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7f3c953-28b9-42e8-9574-afacd7cb4bf0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.630706] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 770.630706] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525de28f-3171-9859-ff89-25c5b9e0fa4f" [ 770.630706] env[68285]: _type = "Task" [ 770.630706] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.638715] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70c0d210-0b37-4f0e-b80e-2435369febf5 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "a97df3d2-c182-46d8-95c2-61caccade285" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.963s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.639022] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525de28f-3171-9859-ff89-25c5b9e0fa4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.653632] env[68285]: DEBUG nova.scheduler.client.report [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 770.755642] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891067, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.791871] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891068, 'name': Rename_Task, 'duration_secs': 0.295369} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.792801] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 770.793278] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73a5c57c-6c54-45ed-928b-05031f73a1c9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.798651] env[68285]: DEBUG oslo_concurrency.lockutils [req-f9155b02-8165-42fc-b8cb-1e3f6331db2e req-eaf6ced2-2511-4c8f-9695-2f00da3fd5bd service nova] Releasing lock "refresh_cache-fe9a8a13-73ec-4556-a62c-cc49fd01f539" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.803782] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 770.803782] env[68285]: value = "task-2891070" [ 770.803782] env[68285]: _type = "Task" [ 770.803782] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.833451] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891070, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.035351] env[68285]: DEBUG nova.compute.manager [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 771.047120] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 771.047120] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523d5b03-09e3-6265-b228-bdca3c7f45fd" [ 771.047120] env[68285]: _type = "HttpNfcLease" [ 771.047120] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 771.047400] env[68285]: DEBUG oslo_vmware.rw_handles [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 771.047400] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523d5b03-09e3-6265-b228-bdca3c7f45fd" [ 771.047400] env[68285]: _type = "HttpNfcLease" [ 771.047400] env[68285]: }. 
{{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 771.048285] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f9f42e-6893-4f15-a233-94462c2e4f1c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.060067] env[68285]: DEBUG oslo_vmware.rw_handles [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fec254-dc2f-fe13-f2b1-c2b83107c1d2/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 771.060262] env[68285]: DEBUG oslo_vmware.rw_handles [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fec254-dc2f-fe13-f2b1-c2b83107c1d2/disk-0.vmdk for reading. {{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 771.143023] env[68285]: DEBUG nova.compute.manager [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 771.149371] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525de28f-3171-9859-ff89-25c5b9e0fa4f, 'name': SearchDatastore_Task, 'duration_secs': 0.011082} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.150208] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.150303] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 771.150935] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.150935] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.150935] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 771.151326] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae8c6a0b-acfa-4e25-9f3a-c40218e02703 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.159418] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.160098] env[68285]: DEBUG nova.compute.manager [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 771.166989] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.370s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.168954] env[68285]: INFO nova.compute.claims [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 771.172389] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 771.172389] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 771.172835] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5ce953d-68a8-4d87-a39c-05e7f1b5cbf9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.178466] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e1cca0bb-3b16-4ab7-b24d-8f46ca11f2c1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.185038] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 771.185038] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521af737-b964-2769-226d-b9c4cd8c375e" [ 771.185038] env[68285]: _type = "Task" [ 771.185038] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.195992] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521af737-b964-2769-226d-b9c4cd8c375e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.257822] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891067, 'name': ReconfigVM_Task, 'duration_secs': 0.656427} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.259081] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Reconfigured VM instance instance-00000009 to attach disk [datastore2] fe9a8a13-73ec-4556-a62c-cc49fd01f539/fe9a8a13-73ec-4556-a62c-cc49fd01f539.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 771.259845] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1807dd43-4b53-4550-a7bc-71d18ac10082 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.269555] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 771.269555] env[68285]: value = "task-2891071" [ 771.269555] env[68285]: _type = "Task" [ 771.269555] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.286467] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891071, 'name': Rename_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.318614] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891070, 'name': PowerOnVM_Task} progress is 1%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.549823] env[68285]: DEBUG nova.compute.manager [req-fdf0f919-ed77-4f1e-9bf8-13278556cebf req-809d0030-8b47-4654-9983-048aa3b3c805 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Received event network-changed-3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 771.549823] env[68285]: DEBUG nova.compute.manager [req-fdf0f919-ed77-4f1e-9bf8-13278556cebf req-809d0030-8b47-4654-9983-048aa3b3c805 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Refreshing instance network info cache due to event network-changed-3b795cd1-99e2-4a06-9607-e71ca33d19ff. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 771.549823] env[68285]: DEBUG oslo_concurrency.lockutils [req-fdf0f919-ed77-4f1e-9bf8-13278556cebf req-809d0030-8b47-4654-9983-048aa3b3c805 service nova] Acquiring lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.550385] env[68285]: DEBUG oslo_concurrency.lockutils [req-fdf0f919-ed77-4f1e-9bf8-13278556cebf req-809d0030-8b47-4654-9983-048aa3b3c805 service nova] Acquired lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.550752] env[68285]: DEBUG nova.network.neutron [req-fdf0f919-ed77-4f1e-9bf8-13278556cebf req-809d0030-8b47-4654-9983-048aa3b3c805 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Refreshing network info cache for port 3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 771.581142] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.673652] env[68285]: DEBUG nova.compute.utils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 771.686281] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.686704] env[68285]: DEBUG nova.compute.manager [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 771.686948] env[68285]: DEBUG nova.network.neutron [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 771.709403] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521af737-b964-2769-226d-b9c4cd8c375e, 'name': SearchDatastore_Task, 'duration_secs': 0.015316} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.710733] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c617b0c8-ad86-4e1e-adb3-b8a5b24b7978 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.724190] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 771.724190] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52812346-58a8-01d6-9a34-53b077074118" [ 771.724190] env[68285]: _type = "Task" [ 771.724190] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.741150] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52812346-58a8-01d6-9a34-53b077074118, 'name': SearchDatastore_Task, 'duration_secs': 0.018159} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.741538] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.741829] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 11de7da5-1d73-4536-b2a1-f7dbbdec14b8/11de7da5-1d73-4536-b2a1-f7dbbdec14b8.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 771.743281] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2dc18873-d853-4f5f-9d1a-cf4c8971832e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.751489] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 771.751489] env[68285]: value = "task-2891072" [ 771.751489] env[68285]: _type = "Task" [ 771.751489] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.765822] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891072, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.785605] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891071, 'name': Rename_Task, 'duration_secs': 0.469192} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.785701] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 771.786844] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f57fb553-5158-4c58-880c-ef3062c6dda4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.791056] env[68285]: DEBUG nova.policy [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '70340a4cc3df49ff971f299e439a1581', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98d03ce152e74cec8910b12d34ad8ba6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 771.799928] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 771.799928] env[68285]: value = "task-2891073" [ 771.799928] env[68285]: _type = "Task" [ 771.799928] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.824672] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891070, 'name': PowerOnVM_Task} progress is 64%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.832610] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891073, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.081611] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "81fe4854-1094-4c42-9df5-05325d961146" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.081930] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "81fe4854-1094-4c42-9df5-05325d961146" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.131595] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "ee45231a-80f2-49b9-8bc7-03a0c920a668" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.132133] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "ee45231a-80f2-49b9-8bc7-03a0c920a668" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.167679] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "0d13cc84-bbf2-4e8b-8344-d69acac6bd35" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.168307] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "0d13cc84-bbf2-4e8b-8344-d69acac6bd35" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.170089] env[68285]: DEBUG nova.compute.manager [None req-4a08a105-a044-4466-abdb-37c493abc050 tempest-ServerDiagnosticsTest-278930353 tempest-ServerDiagnosticsTest-278930353-project-admin] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 772.171852] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850fe408-c2ff-4751-a559-d4976c761d2f {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.182862] env[68285]: INFO nova.compute.manager [None req-4a08a105-a044-4466-abdb-37c493abc050 tempest-ServerDiagnosticsTest-278930353 tempest-ServerDiagnosticsTest-278930353-project-admin] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Retrieving diagnostics [ 772.182862] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9601e7f9-3d0e-446d-a318-a9f33cc73efd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.190749] env[68285]: DEBUG nova.compute.manager [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 772.276822] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891072, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.318182] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891073, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.328292] env[68285]: DEBUG oslo_vmware.api [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891070, 'name': PowerOnVM_Task, 'duration_secs': 1.246118} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.330878] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 772.331143] env[68285]: INFO nova.compute.manager [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Took 12.05 seconds to spawn the instance on the hypervisor. 
[ 772.331411] env[68285]: DEBUG nova.compute.manager [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 772.332401] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1cae27-3f2f-4ef5-8bc5-55bfc86a7da5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.388967] env[68285]: DEBUG nova.network.neutron [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Successfully created port: 3c317408-dd23-42c9-a837-c59782c5654a {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 772.642567] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328d476c-e086-4fed-a21b-56ab5d43f03b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.651906] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78281b5f-1560-4797-9fb4-2fb0f495b219 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.695891] env[68285]: DEBUG nova.network.neutron [req-fdf0f919-ed77-4f1e-9bf8-13278556cebf req-809d0030-8b47-4654-9983-048aa3b3c805 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Updated VIF entry in instance network info cache for port 3b795cd1-99e2-4a06-9607-e71ca33d19ff. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 772.696564] env[68285]: DEBUG nova.network.neutron [req-fdf0f919-ed77-4f1e-9bf8-13278556cebf req-809d0030-8b47-4654-9983-048aa3b3c805 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Updating instance_info_cache with network_info: [{"id": "3b795cd1-99e2-4a06-9607-e71ca33d19ff", "address": "fa:16:3e:75:c1:88", "network": {"id": "23f2c3a4-b609-4f2b-82ea-30d2f16df8e6", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2087738678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53261bb9432948b58692227101a4717b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b795cd1-99", "ovs_interfaceid": "3b795cd1-99e2-4a06-9607-e71ca33d19ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.698996] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f12a1e9-b167-4878-a585-7d163f4cc474 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.716022] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69a4ab6-7d02-4548-961d-e030e7fb16e4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.738867] env[68285]: DEBUG nova.compute.provider_tree [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.767535] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891072, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.635184} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.767654] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 11de7da5-1d73-4536-b2a1-f7dbbdec14b8/11de7da5-1d73-4536-b2a1-f7dbbdec14b8.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 772.768042] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 772.768447] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6776dbf1-7dd2-4533-aa5a-6b634dab16d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.778051] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 772.778051] env[68285]: value = "task-2891074" [ 772.778051] env[68285]: _type = "Task" [ 772.778051] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.789807] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891074, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.816178] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891073, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.855117] env[68285]: INFO nova.compute.manager [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Took 26.12 seconds to build instance. 
[ 773.180024] env[68285]: DEBUG nova.compute.manager [None req-1e726e84-74cb-4346-ae0d-d2db3d8684e0 tempest-ServerDiagnosticsV248Test-390296692 tempest-ServerDiagnosticsV248Test-390296692-project-admin] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 773.180024] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8981d0e7-e6a7-437a-b3a2-50f64dd7f26b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.190046] env[68285]: INFO nova.compute.manager [None req-1e726e84-74cb-4346-ae0d-d2db3d8684e0 tempest-ServerDiagnosticsV248Test-390296692 tempest-ServerDiagnosticsV248Test-390296692-project-admin] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Retrieving diagnostics [ 773.190046] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c2be32-9c42-4936-b7de-cd7dbfde4196 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.222130] env[68285]: DEBUG oslo_concurrency.lockutils [req-fdf0f919-ed77-4f1e-9bf8-13278556cebf req-809d0030-8b47-4654-9983-048aa3b3c805 service nova] Releasing lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.222638] env[68285]: DEBUG nova.compute.manager [req-fdf0f919-ed77-4f1e-9bf8-13278556cebf req-809d0030-8b47-4654-9983-048aa3b3c805 service nova] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Received event network-vif-deleted-b3b48e73-c170-4669-888a-5f674831a535 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 773.223746] env[68285]: DEBUG nova.compute.manager [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 773.242692] env[68285]: DEBUG nova.scheduler.client.report [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 773.254168] env[68285]: DEBUG nova.virt.hardware [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 773.254402] env[68285]: DEBUG nova.virt.hardware [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.254573] env[68285]: DEBUG nova.virt.hardware [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 773.254766] env[68285]: DEBUG nova.virt.hardware [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.254913] env[68285]: DEBUG nova.virt.hardware [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 773.255049] env[68285]: DEBUG nova.virt.hardware [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 773.255252] env[68285]: DEBUG nova.virt.hardware [None 
req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 773.255400] env[68285]: DEBUG nova.virt.hardware [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 773.255570] env[68285]: DEBUG nova.virt.hardware [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 773.255932] env[68285]: DEBUG nova.virt.hardware [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 773.256465] env[68285]: DEBUG nova.virt.hardware [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 773.257928] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5bd5ad-4c45-4064-ab2a-538c9a4c85a6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.268706] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69dac5d-b50f-4ea2-94a2-b2c2c2de6f0c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.296207] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891074, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087082} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.296543] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 773.297434] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f189ff3-fcee-47f4-aa9c-aaf3f6a66c64 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.324761] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 11de7da5-1d73-4536-b2a1-f7dbbdec14b8/11de7da5-1d73-4536-b2a1-f7dbbdec14b8.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 773.325820] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f92ecf67-f3dd-4706-9960-115de5a38be0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.345351] env[68285]: DEBUG oslo_vmware.api [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891073, 'name': PowerOnVM_Task, 'duration_secs': 1.037608} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.346246] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 773.346480] env[68285]: INFO nova.compute.manager [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Took 10.63 seconds to spawn the instance on the hypervisor. 
[ 773.346676] env[68285]: DEBUG nova.compute.manager [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 773.350072] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeaf99e3-c736-44f2-b381-b820b66e3140 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.352547] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 773.352547] env[68285]: value = "task-2891075" [ 773.352547] env[68285]: _type = "Task" [ 773.352547] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.359671] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a0a873e-cf60-4900-bd77-263f2cb9863d tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.632s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.369525] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891075, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.750430] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.583s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.750992] env[68285]: DEBUG nova.compute.manager [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 773.755142] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.880s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.760164] env[68285]: INFO nova.compute.claims [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 773.876275] env[68285]: DEBUG nova.compute.manager [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 773.878980] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.882229] env[68285]: INFO nova.compute.manager [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Took 25.14 seconds to build instance. [ 774.264344] env[68285]: DEBUG nova.compute.utils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 774.269995] env[68285]: DEBUG nova.compute.manager [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 774.273980] env[68285]: DEBUG nova.network.neutron [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 774.370279] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891075, 'name': ReconfigVM_Task, 'duration_secs': 0.638954} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.371890] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 11de7da5-1d73-4536-b2a1-f7dbbdec14b8/11de7da5-1d73-4536-b2a1-f7dbbdec14b8.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 774.374689] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb8d7f7a-e9c6-4d47-8322-409ed36c5cc3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.386965] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 774.386965] env[68285]: value = "task-2891076" [ 774.386965] env[68285]: _type = "Task" [ 774.386965] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.391399] env[68285]: DEBUG nova.policy [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '70340a4cc3df49ff971f299e439a1581', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98d03ce152e74cec8910b12d34ad8ba6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 774.394105] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d617d724-9e06-427f-b82b-94fe2759a766 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "fe9a8a13-73ec-4556-a62c-cc49fd01f539" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.661s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.422239] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891076, 'name': Rename_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.431786] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.537523] env[68285]: DEBUG nova.compute.manager [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Stashing vm_state: active {{(pid=68285) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 774.594529] env[68285]: DEBUG nova.network.neutron [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Successfully updated port: 3c317408-dd23-42c9-a837-c59782c5654a {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 774.770503] env[68285]: DEBUG nova.compute.manager [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 774.908651] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 774.911484] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891076, 'name': Rename_Task, 'duration_secs': 0.219539} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.911987] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 774.918708] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ad45c9b-ce3c-462d-a2d0-bdc223d5cb30 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.932511] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 774.932511] env[68285]: value = "task-2891077" [ 774.932511] env[68285]: _type = "Task" [ 774.932511] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.951015] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891077, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.960780] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Acquiring lock "f26a5b02-c71f-4f04-a8b2-4e284a6e37a6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.960930] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Lock "f26a5b02-c71f-4f04-a8b2-4e284a6e37a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.961158] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Acquiring lock "f26a5b02-c71f-4f04-a8b2-4e284a6e37a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.961352] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Lock "f26a5b02-c71f-4f04-a8b2-4e284a6e37a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.961530] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Lock "f26a5b02-c71f-4f04-a8b2-4e284a6e37a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.971564] env[68285]: INFO nova.compute.manager [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Terminating instance [ 775.072059] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.095376] env[68285]: DEBUG oslo_concurrency.lockutils [None 
req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "refresh_cache-ec89a2a4-3bfc-45c5-b7f2-239b52995d6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.095573] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "refresh_cache-ec89a2a4-3bfc-45c5-b7f2-239b52995d6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.095767] env[68285]: DEBUG nova.network.neutron [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 775.209932] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dd451e-ce27-4df0-add1-3af0aa213317 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.218523] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1eeb64d-a616-45bc-b109-79cfb4664908 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.256641] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2ac027-6d87-4d4f-954a-42130e6f2ad8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.266109] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b800d049-83cc-4b9a-880b-6326415fe82d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.290354] env[68285]: DEBUG nova.compute.provider_tree [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.437062] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.443345] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891077, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.476581] env[68285]: DEBUG nova.compute.manager [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 775.476581] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 775.477365] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c9d68e-5081-4026-8b80-e84628399e4c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.486872] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 775.486872] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04f910d7-1efd-4aa6-9730-ee504709bc01 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.495528] env[68285]: DEBUG oslo_vmware.api [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Waiting for the task: (returnval){ [ 775.495528] env[68285]: value = "task-2891078" [ 775.495528] env[68285]: _type = "Task" [ 775.495528] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.510630] env[68285]: DEBUG oslo_vmware.api [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891078, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.643476] env[68285]: DEBUG nova.network.neutron [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 775.793518] env[68285]: DEBUG nova.compute.manager [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 775.799175] env[68285]: DEBUG nova.scheduler.client.report [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 775.829932] env[68285]: DEBUG nova.network.neutron [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Updating instance_info_cache with network_info: [{"id": "3c317408-dd23-42c9-a837-c59782c5654a", "address": "fa:16:3e:64:d3:c1", "network": {"id": "d7225652-1f39-4108-a3ec-2fe16c2f3612", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-725430316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d03ce152e74cec8910b12d34ad8ba6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c317408-dd", "ovs_interfaceid": "3c317408-dd23-42c9-a837-c59782c5654a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.831393] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Acquiring lock "9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.831593] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Lock "9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.831806] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] 
Acquiring lock "9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.831953] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Lock "9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.832130] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Lock "9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.836851] env[68285]: INFO nova.compute.manager [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Terminating instance [ 775.841884] env[68285]: DEBUG nova.virt.hardware [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 775.841884] env[68285]: DEBUG nova.virt.hardware [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 775.841884] env[68285]: DEBUG nova.virt.hardware [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 775.842124] env[68285]: DEBUG nova.virt.hardware [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 775.842314] env[68285]: DEBUG nova.virt.hardware [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf 
tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 775.842487] env[68285]: DEBUG nova.virt.hardware [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 775.842770] env[68285]: DEBUG nova.virt.hardware [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 775.842844] env[68285]: DEBUG nova.virt.hardware [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 775.843244] env[68285]: DEBUG nova.virt.hardware [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 775.843539] env[68285]: DEBUG nova.virt.hardware [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 775.843748] env[68285]: DEBUG nova.virt.hardware [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 775.844699] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e52fab3-ac3e-4e64-a83f-904e32e330eb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.851264] env[68285]: DEBUG nova.compute.manager [req-04621c68-c1f0-4f60-830b-fa8cba3052b1 req-f0d28aaf-b684-4b6b-89b0-dadb9a272c40 service nova] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Received event network-vif-plugged-3c317408-dd23-42c9-a837-c59782c5654a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 775.851264] env[68285]: DEBUG oslo_concurrency.lockutils [req-04621c68-c1f0-4f60-830b-fa8cba3052b1 req-f0d28aaf-b684-4b6b-89b0-dadb9a272c40 service nova] Acquiring lock "ec89a2a4-3bfc-45c5-b7f2-239b52995d6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.851264] env[68285]: DEBUG oslo_concurrency.lockutils [req-04621c68-c1f0-4f60-830b-fa8cba3052b1 req-f0d28aaf-b684-4b6b-89b0-dadb9a272c40 service nova] Lock "ec89a2a4-3bfc-45c5-b7f2-239b52995d6b-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.851264] env[68285]: DEBUG oslo_concurrency.lockutils [req-04621c68-c1f0-4f60-830b-fa8cba3052b1 req-f0d28aaf-b684-4b6b-89b0-dadb9a272c40 service nova] Lock "ec89a2a4-3bfc-45c5-b7f2-239b52995d6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.851411] env[68285]: DEBUG nova.compute.manager [req-04621c68-c1f0-4f60-830b-fa8cba3052b1 req-f0d28aaf-b684-4b6b-89b0-dadb9a272c40 service nova] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] No waiting events found dispatching network-vif-plugged-3c317408-dd23-42c9-a837-c59782c5654a {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 775.851486] env[68285]: WARNING nova.compute.manager [req-04621c68-c1f0-4f60-830b-fa8cba3052b1 req-f0d28aaf-b684-4b6b-89b0-dadb9a272c40 service nova] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Received unexpected event network-vif-plugged-3c317408-dd23-42c9-a837-c59782c5654a for instance with vm_state building and task_state spawning. [ 775.859841] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83365b10-66a7-47d3-a3d0-54a389c8b7db {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.899462] env[68285]: DEBUG nova.network.neutron [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Successfully created port: 23fd8792-3c59-451c-9424-1043ad4846a0 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 775.944531] env[68285]: DEBUG oslo_vmware.api [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891077, 'name': PowerOnVM_Task, 'duration_secs': 1.0095} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.944688] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 775.944911] env[68285]: INFO nova.compute.manager [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Took 10.80 seconds to spawn the instance on the hypervisor. 
[ 775.945117] env[68285]: DEBUG nova.compute.manager [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 775.946155] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3cace6e-9632-4bcf-9b7d-cd4b4a140d19 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.010250] env[68285]: DEBUG oslo_vmware.api [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891078, 'name': PowerOffVM_Task, 'duration_secs': 0.252965} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.010638] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 776.011123] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 776.011406] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a19ad7b6-1d21-4250-a6ec-c04bfb445b45 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.100390] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 776.100390] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 776.101379] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Deleting the datastore file [datastore2] f26a5b02-c71f-4f04-a8b2-4e284a6e37a6 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 776.101379] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c1c8b6d-5fb4-4acb-b332-5a1a796117df {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.115889] env[68285]: DEBUG oslo_vmware.api [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 
tempest-ServerDiagnosticsTest-1060127533-project-member] Waiting for the task: (returnval){ [ 776.115889] env[68285]: value = "task-2891080" [ 776.115889] env[68285]: _type = "Task" [ 776.115889] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.128750] env[68285]: DEBUG oslo_vmware.api [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891080, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.305509] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.551s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.306265] env[68285]: DEBUG nova.compute.manager [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 776.309159] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.156s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.311238] env[68285]: INFO nova.compute.claims [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 776.337183] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "refresh_cache-ec89a2a4-3bfc-45c5-b7f2-239b52995d6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.337611] env[68285]: DEBUG nova.compute.manager [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Instance network_info: |[{"id": "3c317408-dd23-42c9-a837-c59782c5654a", "address": "fa:16:3e:64:d3:c1", "network": {"id": "d7225652-1f39-4108-a3ec-2fe16c2f3612", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-725430316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"98d03ce152e74cec8910b12d34ad8ba6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c317408-dd", "ovs_interfaceid": "3c317408-dd23-42c9-a837-c59782c5654a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 776.338188] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Acquiring lock "refresh_cache-9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.338345] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Acquired lock "refresh_cache-9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.338501] env[68285]: DEBUG nova.network.neutron [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 776.341913] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:d3:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c317408-dd23-42c9-a837-c59782c5654a', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 776.349888] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Creating folder: Project (98d03ce152e74cec8910b12d34ad8ba6). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 776.349888] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c65b5c3-e9b2-4ea4-8e40-3c2f05720737 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.366274] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Created folder: Project (98d03ce152e74cec8910b12d34ad8ba6) in parent group-v580775. 
[ 776.366472] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Creating folder: Instances. Parent ref: group-v580808. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 776.366895] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-052c7bcc-e02b-41a5-8ba2-5f331d98764e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.380380] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Created folder: Instances in parent group-v580808. [ 776.380535] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 776.380699] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 776.380901] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97f1e6f6-40f7-450b-b8ee-0c6c83fb715a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.405151] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 776.405151] env[68285]: value = "task-2891083" [ 776.405151] env[68285]: _type = "Task" [ 776.405151] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.416579] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891083, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.473602] env[68285]: INFO nova.compute.manager [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Took 25.12 seconds to build instance. [ 776.631862] env[68285]: DEBUG oslo_vmware.api [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Task: {'id': task-2891080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.392452} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.632203] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 776.632386] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 776.632562] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 776.632818] env[68285]: INFO nova.compute.manager [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Took 1.16 seconds to destroy the instance on the hypervisor. [ 776.634362] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 776.634772] env[68285]: DEBUG nova.compute.manager [-] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 776.635547] env[68285]: DEBUG nova.network.neutron [-] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 776.816794] env[68285]: DEBUG nova.compute.utils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 776.829998] env[68285]: DEBUG nova.compute.manager [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 776.829998] env[68285]: DEBUG nova.network.neutron [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 776.885864] env[68285]: DEBUG nova.network.neutron [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 776.925617] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891083, 'name': CreateVM_Task, 'duration_secs': 0.494942} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.925804] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 776.926577] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.926747] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.927072] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 776.927338] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a31d9eae-73c8-4f63-a732-253b36079b2c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.938763] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 776.938763] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f2fbad-8c10-b445-68ae-9cbd6da2b14a" [ 776.938763] env[68285]: _type = "Task" [ 776.938763] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.948110] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f2fbad-8c10-b445-68ae-9cbd6da2b14a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.974583] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d169a264-725e-4569-b3c9-e32a09a53d16 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "11de7da5-1d73-4536-b2a1-f7dbbdec14b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.631s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.239060] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a729c9f-815e-49bd-a890-81c5de3eb746 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.248382] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46137794-a988-465e-bbc0-dd1c41c41eab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.284633] env[68285]: DEBUG nova.policy [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2412917acb984a589838e5a1f4557b50', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bef2539ec26f4cb5810e41ed80f04860', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 777.286788] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7846295-9ca4-48d0-9098-6f68a92b71db {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.296120] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48df9bc1-6827-4233-8c0c-c72daa71d5bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.312730] env[68285]: DEBUG nova.compute.provider_tree [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.324638] env[68285]: DEBUG nova.compute.manager [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Start building block 
device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 777.341827] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Acquiring lock "682c3b6e-a605-486a-86c8-af173d80cbcf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.342462] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Lock "682c3b6e-a605-486a-86c8-af173d80cbcf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.342673] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Acquiring lock "682c3b6e-a605-486a-86c8-af173d80cbcf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.342926] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Lock "682c3b6e-a605-486a-86c8-af173d80cbcf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.343060] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Lock "682c3b6e-a605-486a-86c8-af173d80cbcf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.345269] env[68285]: INFO nova.compute.manager [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Terminating instance [ 777.438190] env[68285]: DEBUG nova.network.neutron [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.452152] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f2fbad-8c10-b445-68ae-9cbd6da2b14a, 'name': SearchDatastore_Task, 'duration_secs': 0.020331} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.453316] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.453946] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 777.454274] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.454438] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.454625] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 777.455174] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27188c4a-e858-4992-9948-812b2cb259d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.467577] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 777.467820] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 777.468681] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad00e0ce-ef0b-4e3e-bf28-a58c68e3166b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.476456] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 777.476456] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5236ad3b-c9fa-1e6e-84a3-dcfc37cceaef" [ 777.476456] env[68285]: _type = "Task" [ 777.476456] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.480134] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 777.489373] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5236ad3b-c9fa-1e6e-84a3-dcfc37cceaef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.818682] env[68285]: DEBUG nova.scheduler.client.report [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 777.850879] env[68285]: DEBUG nova.compute.manager [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 777.853967] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 777.857755] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12795ada-b8e1-4b3b-93d7-1286fe0f5dc3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.873581] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 777.874277] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe8caf98-bac2-4fd6-9d89-aebbf9729075 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.886592] env[68285]: DEBUG oslo_vmware.api [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for the task: (returnval){ [ 777.886592] env[68285]: value = "task-2891084" [ 777.886592] env[68285]: _type = "Task" [ 777.886592] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.904417] env[68285]: DEBUG oslo_vmware.api [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891084, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.945455] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Releasing lock "refresh_cache-9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.946104] env[68285]: DEBUG nova.compute.manager [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 777.947135] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 777.949743] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59100419-7e9e-4b2f-b545-264e4fbaa0de {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.960944] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 777.960944] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8201acb2-354e-42ef-9d93-2f8e0dd1ef0a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.974532] env[68285]: DEBUG oslo_vmware.api [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Waiting for the task: (returnval){ [ 777.974532] env[68285]: value = "task-2891085" [ 777.974532] env[68285]: _type = "Task" [ 777.974532] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.993233] env[68285]: DEBUG oslo_vmware.api [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891085, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.000689] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5236ad3b-c9fa-1e6e-84a3-dcfc37cceaef, 'name': SearchDatastore_Task, 'duration_secs': 0.030424} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.000936] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-817be7e3-c1a0-48f0-98a9-2e23955d66a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.010744] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 778.010744] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5276e66f-f2d0-8d18-4f14-44e3f836e0a3" [ 778.010744] env[68285]: _type = "Task" [ 778.010744] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.022128] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5276e66f-f2d0-8d18-4f14-44e3f836e0a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.024448] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.302074] env[68285]: DEBUG nova.network.neutron [-] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.323326] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.014s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.323744] env[68285]: DEBUG nova.compute.manager [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 778.328815] env[68285]: DEBUG oslo_concurrency.lockutils [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.889s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.329121] env[68285]: DEBUG nova.objects.instance [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Lazy-loading 'resources' on Instance uuid 105f0ad6-1591-40b9-997c-280860bd6501 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 778.337451] env[68285]: DEBUG nova.compute.manager [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 778.370049] env[68285]: DEBUG nova.virt.hardware [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 778.370049] env[68285]: DEBUG nova.virt.hardware [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 778.370049] env[68285]: DEBUG nova.virt.hardware [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 778.370252] env[68285]: DEBUG nova.virt.hardware [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 778.370252] env[68285]: DEBUG nova.virt.hardware [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 778.370252] env[68285]: DEBUG nova.virt.hardware [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 778.370252] env[68285]: DEBUG nova.virt.hardware [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 778.370252] env[68285]: DEBUG nova.virt.hardware [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 778.370394] env[68285]: DEBUG nova.virt.hardware [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 778.370394] env[68285]: DEBUG nova.virt.hardware [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 778.370394] env[68285]: DEBUG nova.virt.hardware [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 778.374785] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8eaf18e-8543-498c-b1c5-8671572f05c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.387639] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389fd775-339a-463d-b2ed-b59e565eb527 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.413217] env[68285]: DEBUG oslo_vmware.api [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891084, 'name': PowerOffVM_Task, 'duration_secs': 0.364045} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.414310] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 778.414480] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 778.414968] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b112acb5-a244-45ff-8d85-548b87cfe6be {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.486721] env[68285]: DEBUG oslo_vmware.api [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891085, 'name': PowerOffVM_Task, 'duration_secs': 0.172887} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.487053] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 778.487322] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 778.487478] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f92206d-d418-4fd9-8286-bf0ac34f6f13 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.493653] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 778.493871] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 778.494067] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Deleting the datastore file [datastore2] 682c3b6e-a605-486a-86c8-af173d80cbcf {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 778.494453] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-65132a2a-16ef-4843-b2ac-df706f68aeda {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.501743] env[68285]: DEBUG oslo_vmware.api [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for the task: (returnval){ [ 778.501743] env[68285]: value = "task-2891088" [ 778.501743] env[68285]: _type = "Task" [ 778.501743] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.514847] env[68285]: DEBUG oslo_vmware.api [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891088, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.523999] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 778.524671] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 778.524671] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Deleting the datastore file [datastore1] 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 778.531679] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b91f58c3-d13c-4efc-bd40-229750580c5b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.535193] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5276e66f-f2d0-8d18-4f14-44e3f836e0a3, 'name': SearchDatastore_Task, 'duration_secs': 0.0177} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.536261] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.536457] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b/ec89a2a4-3bfc-45c5-b7f2-239b52995d6b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 778.537382] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eea1e458-8c0b-4581-837e-b584ab65d1a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.544271] env[68285]: DEBUG oslo_vmware.api [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Waiting for the task: (returnval){ [ 778.544271] env[68285]: value = "task-2891089" [ 778.544271] env[68285]: _type = "Task" [ 778.544271] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.550730] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 778.550730] env[68285]: value = "task-2891090" [ 778.550730] env[68285]: _type = "Task" [ 778.550730] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.564238] env[68285]: DEBUG oslo_vmware.api [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891089, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.571584] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891090, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.805146] env[68285]: INFO nova.compute.manager [-] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Took 2.17 seconds to deallocate network for instance. 
[ 778.842640] env[68285]: DEBUG nova.compute.utils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 778.847022] env[68285]: DEBUG nova.compute.manager [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 778.847022] env[68285]: DEBUG nova.network.neutron [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 779.004672] env[68285]: DEBUG nova.policy [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49c41aed1d5444e1a2e73ab6ad55fec4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53c6c9c73f07454fbe69beeee428a15a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 779.023376] env[68285]: DEBUG oslo_vmware.api [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Task: {'id': task-2891088, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231685} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.023971] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 779.024234] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 779.024413] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 779.024613] env[68285]: INFO nova.compute.manager [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Took 1.17 seconds to destroy the instance on the hypervisor. [ 779.024920] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 779.025148] env[68285]: DEBUG nova.compute.manager [-] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 779.027388] env[68285]: DEBUG nova.network.neutron [-] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 779.060802] env[68285]: DEBUG oslo_vmware.api [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Task: {'id': task-2891089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160941} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.061863] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 779.062674] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 779.062674] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 779.062674] env[68285]: INFO nova.compute.manager [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Took 1.12 seconds to destroy the instance on the hypervisor. [ 779.063030] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 779.067727] env[68285]: DEBUG nova.compute.manager [-] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 779.067727] env[68285]: DEBUG nova.network.neutron [-] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 779.075392] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891090, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.147750] env[68285]: DEBUG nova.network.neutron [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Successfully created port: d13fdc9f-ab41-435a-8bd4-080dbc090832 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 779.219978] env[68285]: DEBUG nova.network.neutron [-] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.258480] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b74c475-95e4-4367-bee4-4ee179987c62 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.270049] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e9f116-e0d3-4de1-9769-cee27aeb9541 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.304917] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f718224-c182-4664-a057-537a74b2d7d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.312371] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.316281] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e527e5d0-b511-4eae-b17d-20bfa3f93b8b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.336965] env[68285]: DEBUG nova.compute.provider_tree [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.351396] env[68285]: DEBUG nova.compute.manager [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 779.695978] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891090, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.669575} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.699194] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b/ec89a2a4-3bfc-45c5-b7f2-239b52995d6b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 779.699638] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 779.700095] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-74a25923-0726-4610-a727-0a475d550393 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.712708] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 779.712708] env[68285]: value = "task-2891091" [ 779.712708] env[68285]: _type = "Task" [ 779.712708] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.728158] env[68285]: DEBUG nova.network.neutron [-] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.729713] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891091, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.840148] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "f0145d64-60e4-4ad5-a6ea-6c5d40780df5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.840416] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "f0145d64-60e4-4ad5-a6ea-6c5d40780df5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.841180] env[68285]: DEBUG nova.scheduler.client.report [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 779.899838] env[68285]: DEBUG nova.network.neutron [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Successfully updated port: 23fd8792-3c59-451c-9424-1043ad4846a0 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 780.212570] env[68285]: DEBUG nova.network.neutron [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Successfully created port: ccc388b0-7423-4892-ac70-e4d86b1a0f17 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 780.227532] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891091, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078111} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.227825] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 780.229578] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-296c20fd-442c-439f-a56f-a968f5d2f908 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.235117] env[68285]: INFO nova.compute.manager [-] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Took 1.17 seconds to deallocate network for instance. [ 780.264671] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b/ec89a2a4-3bfc-45c5-b7f2-239b52995d6b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 780.265357] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afd6f1ec-5268-458c-a649-55557d51bc43 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.290766] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 780.290766] env[68285]: value = "task-2891092" [ 780.290766] env[68285]: _type = "Task" [ 780.290766] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.301580] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891092, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.348497] env[68285]: DEBUG oslo_concurrency.lockutils [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.020s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.353623] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.905s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.353623] env[68285]: INFO nova.compute.claims [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.360738] env[68285]: DEBUG nova.compute.manager [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 780.397550] env[68285]: DEBUG nova.virt.hardware [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:50:29Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='850515881',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-909427701',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 780.397878] env[68285]: DEBUG nova.virt.hardware [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 780.398242] env[68285]: DEBUG nova.virt.hardware [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 780.398242] env[68285]: DEBUG 
nova.virt.hardware [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 780.398386] env[68285]: DEBUG nova.virt.hardware [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 780.398547] env[68285]: DEBUG nova.virt.hardware [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 780.398743] env[68285]: DEBUG nova.virt.hardware [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 780.398874] env[68285]: DEBUG nova.virt.hardware [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 780.399074] env[68285]: DEBUG nova.virt.hardware [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 780.399257] env[68285]: DEBUG nova.virt.hardware [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 780.399441] env[68285]: DEBUG nova.virt.hardware [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 780.400374] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da07120b-35a7-455e-bfff-ee7125eeb4d8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.406395] env[68285]: INFO nova.scheduler.client.report [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Deleted allocations for instance 105f0ad6-1591-40b9-997c-280860bd6501 [ 780.407123] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf 
tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "refresh_cache-60144efd-061e-4144-9541-b2321c9b0ec1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.407697] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "refresh_cache-60144efd-061e-4144-9541-b2321c9b0ec1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.407913] env[68285]: DEBUG nova.network.neutron [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 780.422887] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7303a038-df4a-4510-bc45-96961a1da6e6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.767991] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.805546] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891092, 'name': ReconfigVM_Task, 'duration_secs': 0.347733} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.806098] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Reconfigured VM instance instance-0000000b to attach disk [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b/ec89a2a4-3bfc-45c5-b7f2-239b52995d6b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 780.806730] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83213ded-a2e9-4c2e-9503-a655a9f03ec7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.833549] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 780.833549] env[68285]: value = "task-2891093" [ 780.833549] env[68285]: _type = "Task" [ 780.833549] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.854299] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891093, 'name': Rename_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.920353] env[68285]: DEBUG oslo_concurrency.lockutils [None req-701dcdc6-2a1d-41df-ba89-40073b1004b2 tempest-DeleteServersAdminTestJSON-2123642768 tempest-DeleteServersAdminTestJSON-2123642768-project-admin] Lock "105f0ad6-1591-40b9-997c-280860bd6501" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.457s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.036896] env[68285]: DEBUG nova.network.neutron [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 781.175240] env[68285]: DEBUG nova.network.neutron [-] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.352653] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891093, 'name': Rename_Task, 'duration_secs': 0.185739} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.352653] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 781.352653] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d500687-f483-4b0c-986b-3b7e1476a0cb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.367244] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 781.367244] env[68285]: value = "task-2891094" [ 781.367244] env[68285]: _type = "Task" [ 781.367244] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.378611] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891094, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.566513] env[68285]: DEBUG nova.network.neutron [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Updating instance_info_cache with network_info: [{"id": "23fd8792-3c59-451c-9424-1043ad4846a0", "address": "fa:16:3e:bb:f1:26", "network": {"id": "d7225652-1f39-4108-a3ec-2fe16c2f3612", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-725430316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d03ce152e74cec8910b12d34ad8ba6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23fd8792-3c", "ovs_interfaceid": "23fd8792-3c59-451c-9424-1043ad4846a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.683775] env[68285]: INFO nova.compute.manager [-] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Took 2.66 seconds to deallocate network for instance. 
[ 781.725402] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94bb9163-59ce-4a83-9d55-23a06a536b24 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.742971] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5fb2ae-745c-45cb-b165-aeea897184a3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.787760] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413303c9-046b-4de2-b28e-873556f8596c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.797120] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05dc483e-a896-4658-970d-2d05f5f39211 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.815513] env[68285]: DEBUG nova.compute.provider_tree [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.881776] env[68285]: DEBUG oslo_vmware.api [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891094, 'name': PowerOnVM_Task, 'duration_secs': 0.50681} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.881776] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 781.881776] env[68285]: INFO nova.compute.manager [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Took 8.66 seconds to spawn the instance on the hypervisor. 
[ 781.881776] env[68285]: DEBUG nova.compute.manager [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 781.881776] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbbca96-c71a-4cb1-8078-f94e77b8a2d2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.076387] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "refresh_cache-60144efd-061e-4144-9541-b2321c9b0ec1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.076387] env[68285]: DEBUG nova.compute.manager [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Instance network_info: |[{"id": "23fd8792-3c59-451c-9424-1043ad4846a0", "address": "fa:16:3e:bb:f1:26", "network": {"id": "d7225652-1f39-4108-a3ec-2fe16c2f3612", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-725430316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d03ce152e74cec8910b12d34ad8ba6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23fd8792-3c", "ovs_interfaceid": "23fd8792-3c59-451c-9424-1043ad4846a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 782.076528] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:f1:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23fd8792-3c59-451c-9424-1043ad4846a0', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 782.084508] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 782.085371] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 782.085473] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22be6866-2191-49e9-b616-42f6e327edd0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.110275] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 782.110275] env[68285]: value = "task-2891095" [ 782.110275] env[68285]: _type = "Task" [ 782.110275] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.120217] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891095, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.194185] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.227958] env[68285]: DEBUG nova.compute.manager [req-c61d13f6-a1af-4c9d-ac6b-28430a5a1bc3 req-006a09bd-c503-4352-968a-aeca2c62d4b3 service nova] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Received event network-changed-3c317408-dd23-42c9-a837-c59782c5654a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 782.228114] env[68285]: DEBUG nova.compute.manager [req-c61d13f6-a1af-4c9d-ac6b-28430a5a1bc3 req-006a09bd-c503-4352-968a-aeca2c62d4b3 service nova] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Refreshing instance network info cache due to event network-changed-3c317408-dd23-42c9-a837-c59782c5654a. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 782.228340] env[68285]: DEBUG oslo_concurrency.lockutils [req-c61d13f6-a1af-4c9d-ac6b-28430a5a1bc3 req-006a09bd-c503-4352-968a-aeca2c62d4b3 service nova] Acquiring lock "refresh_cache-ec89a2a4-3bfc-45c5-b7f2-239b52995d6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.228470] env[68285]: DEBUG oslo_concurrency.lockutils [req-c61d13f6-a1af-4c9d-ac6b-28430a5a1bc3 req-006a09bd-c503-4352-968a-aeca2c62d4b3 service nova] Acquired lock "refresh_cache-ec89a2a4-3bfc-45c5-b7f2-239b52995d6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.228607] env[68285]: DEBUG nova.network.neutron [req-c61d13f6-a1af-4c9d-ac6b-28430a5a1bc3 req-006a09bd-c503-4352-968a-aeca2c62d4b3 service nova] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Refreshing network info cache for port 3c317408-dd23-42c9-a837-c59782c5654a {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 782.323338] env[68285]: DEBUG nova.scheduler.client.report [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 782.405787] env[68285]: DEBUG nova.compute.manager [req-70feb67e-fd7c-4d16-bda3-927186e22d36 req-62270941-4626-4780-a02d-898844da3f75 service nova] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Received event network-vif-plugged-23fd8792-3c59-451c-9424-1043ad4846a0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 782.405787] env[68285]: DEBUG oslo_concurrency.lockutils [req-70feb67e-fd7c-4d16-bda3-927186e22d36 req-62270941-4626-4780-a02d-898844da3f75 service nova] Acquiring lock "60144efd-061e-4144-9541-b2321c9b0ec1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.406369] env[68285]: DEBUG oslo_concurrency.lockutils [req-70feb67e-fd7c-4d16-bda3-927186e22d36 req-62270941-4626-4780-a02d-898844da3f75 service nova] Lock "60144efd-061e-4144-9541-b2321c9b0ec1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.406369] env[68285]: DEBUG oslo_concurrency.lockutils [req-70feb67e-fd7c-4d16-bda3-927186e22d36 req-62270941-4626-4780-a02d-898844da3f75 service nova] Lock "60144efd-061e-4144-9541-b2321c9b0ec1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.406369] env[68285]: DEBUG nova.compute.manager [req-70feb67e-fd7c-4d16-bda3-927186e22d36 
req-62270941-4626-4780-a02d-898844da3f75 service nova] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] No waiting events found dispatching network-vif-plugged-23fd8792-3c59-451c-9424-1043ad4846a0 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 782.406369] env[68285]: WARNING nova.compute.manager [req-70feb67e-fd7c-4d16-bda3-927186e22d36 req-62270941-4626-4780-a02d-898844da3f75 service nova] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Received unexpected event network-vif-plugged-23fd8792-3c59-451c-9424-1043ad4846a0 for instance with vm_state building and task_state spawning. [ 782.407495] env[68285]: INFO nova.compute.manager [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Took 23.67 seconds to build instance. [ 782.465148] env[68285]: DEBUG oslo_vmware.rw_handles [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fec254-dc2f-fe13-f2b1-c2b83107c1d2/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 782.465736] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16bd8149-11b5-4a8c-9649-a138d44d1b38 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.475967] env[68285]: DEBUG oslo_vmware.rw_handles [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fec254-dc2f-fe13-f2b1-c2b83107c1d2/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 782.476452] env[68285]: ERROR oslo_vmware.rw_handles [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fec254-dc2f-fe13-f2b1-c2b83107c1d2/disk-0.vmdk due to incomplete transfer. [ 782.476828] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2ea4e886-22b1-483c-95a5-b6f936f3c023 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.486396] env[68285]: DEBUG oslo_vmware.rw_handles [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fec254-dc2f-fe13-f2b1-c2b83107c1d2/disk-0.vmdk. 
{{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 782.488017] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Uploaded image 40c3a6e3-79ad-4702-9fa4-d2bbe51f9e6f to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 782.489140] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 782.489513] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2d69a1dc-bb15-4d75-9106-5fa62932278c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.501337] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 782.501337] env[68285]: value = "task-2891096" [ 782.501337] env[68285]: _type = "Task" [ 782.501337] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.512155] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891096, 'name': Destroy_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.621978] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891095, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.692577] env[68285]: DEBUG nova.network.neutron [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Successfully updated port: d13fdc9f-ab41-435a-8bd4-080dbc090832 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 782.833563] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.482s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.834427] env[68285]: DEBUG nova.compute.manager [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 782.839178] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.259s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.841132] env[68285]: INFO nova.compute.claims [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 782.911699] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19e3761f-fba5-4ce1-a01d-e3007ee7105a tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "ec89a2a4-3bfc-45c5-b7f2-239b52995d6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.543s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.014596] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891096, 'name': Destroy_Task} progress is 33%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.126155] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891095, 'name': CreateVM_Task, 'duration_secs': 0.725717} completed successfully. 
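The "compute_resources" lock entries in this stretch (released after being held 2.482s, then acquired by the next claim after waiting 11.259s) come from oslo.concurrency's named-lock helpers serializing ResourceTracker claims. A minimal sketch of that usage pattern, with a placeholder body standing in for the real claim logic:

from oslo_concurrency import lockutils

# decorator form: every caller of instance_claim() serializes on the
# in-process lock named "compute_resources"; the waited/held times in
# the log are measured around this lock
@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    return 'claimed %s' % instance_uuid   # placeholder for the real claim

# equivalent context-manager form
def instance_claim_ctx(instance_uuid):
    with lockutils.lock('compute_resources'):
        return 'claimed %s' % instance_uuid

print(instance_claim('9f4b2b94-ec19-4a8e-8663-ab71c417d093'))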
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.126416] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 783.127425] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.127538] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.128689] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 783.131087] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8c73662-d289-417a-9026-5838ffb9d5e4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.135141] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 783.135141] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ba6829-d2b9-7009-067c-5ae7c4e5ebf5" [ 783.135141] env[68285]: _type = "Task" [ 783.135141] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.145540] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ba6829-d2b9-7009-067c-5ae7c4e5ebf5, 'name': SearchDatastore_Task} progress is 0%. 
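The repeated "Waiting for the task: (returnval){ value = ... }" and "Task: {...} progress is N%" entries are oslo.vmware's task handling: every vSphere method ending in _Task returns a Task moref immediately, and the caller blocks in wait_for_task() while the library polls the task's info property. A minimal caller-side sketch (Destroy_Task is the same call that produced task-2891096 above); `session` and the morefs are assumed to exist.

from oslo_vmware import vim_util

def destroy_vm(session, vm_ref):
    # returns only after vCenter reports success; the intermediate
    # "progress is N%" lines in the log come from the poll loop
    task = session.invoke_api(session.vim, 'Destroy_Task', vm_ref)
    return session.wait_for_task(task)

def task_progress(session, task_ref):
    # one poll iteration: read the TaskInfo and report state/progress
    info = session.invoke_api(vim_util, 'get_object_property',
                              session.vim, task_ref, 'info')
    return info.state, getattr(info, 'progress', None)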
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.200676] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Acquiring lock "refresh_cache-5b58896c-cb07-48c8-ace0-385486a3e19d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.200913] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Acquired lock "refresh_cache-5b58896c-cb07-48c8-ace0-385486a3e19d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.201138] env[68285]: DEBUG nova.network.neutron [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 783.343018] env[68285]: DEBUG nova.compute.utils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 783.343018] env[68285]: DEBUG nova.compute.manager [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 783.343018] env[68285]: DEBUG nova.network.neutron [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 783.414082] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 783.513885] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891096, 'name': Destroy_Task, 'duration_secs': 0.941047} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.514187] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Destroyed the VM [ 783.514529] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 783.514862] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9ff82f15-2dcd-42cc-93fe-db398818e01e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.524274] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 783.524274] env[68285]: value = "task-2891097" [ 783.524274] env[68285]: _type = "Task" [ 783.524274] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.536884] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891097, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.538033] env[68285]: DEBUG nova.network.neutron [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Successfully updated port: ccc388b0-7423-4892-ac70-e4d86b1a0f17 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 783.580451] env[68285]: DEBUG nova.policy [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '798b162100a54f3a974caa17e77f0f9d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee59d7c8bf9d4e35b0c2e1861f375a1e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 783.595475] env[68285]: DEBUG nova.network.neutron [req-c61d13f6-a1af-4c9d-ac6b-28430a5a1bc3 req-006a09bd-c503-4352-968a-aeca2c62d4b3 service nova] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Updated VIF entry in instance network info cache for port 3c317408-dd23-42c9-a837-c59782c5654a. 
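The nova.policy entry above records a failed check of network:attach_external_network for a member/reader token; it is non-fatal here, and the build continues in the following lines. A rough standalone illustration of how such a rule check is evaluated with oslo.policy; the 'role:admin' rule string is an assumption for the demo, not necessarily Nova's actual default.

from oslo_config import cfg
from oslo_policy import policy

CONF = cfg.CONF
CONF([], project='policy-demo')          # defaults only, no config files
enforcer = policy.Enforcer(CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))
creds = {'roles': ['member', 'reader'],
         'project_id': 'ee59d7c8bf9d4e35b0c2e1861f375a1e'}
# False for this request context, matching the "failed" debug line
print(enforcer.enforce('network:attach_external_network', {}, creds))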
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 783.595538] env[68285]: DEBUG nova.network.neutron [req-c61d13f6-a1af-4c9d-ac6b-28430a5a1bc3 req-006a09bd-c503-4352-968a-aeca2c62d4b3 service nova] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Updating instance_info_cache with network_info: [{"id": "3c317408-dd23-42c9-a837-c59782c5654a", "address": "fa:16:3e:64:d3:c1", "network": {"id": "d7225652-1f39-4108-a3ec-2fe16c2f3612", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-725430316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d03ce152e74cec8910b12d34ad8ba6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c317408-dd", "ovs_interfaceid": "3c317408-dd23-42c9-a837-c59782c5654a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.648206] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ba6829-d2b9-7009-067c-5ae7c4e5ebf5, 'name': SearchDatastore_Task, 'duration_secs': 0.031721} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.648516] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.648743] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 783.651537] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.651537] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.651537] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 783.651537] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5aa3db2f-964b-4751-8091-34525da47b09 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.661081] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 783.661295] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Folder [datastore1] devstack-image-cache_base created. 
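The ds_util entries above create the devstack-image-cache_base directory on datastore1 through FileManager.MakeDirectory before the cached image is used. A minimal sketch of that call; `session` and `dc_ref` (the Datacenter moref) are assumed, and an already-existing folder is simply tolerated since the cache directory is shared between instances.

from oslo_vmware import exceptions as vexc

def ensure_image_cache_dir(session, dc_ref):
    file_manager = session.vim.service_content.fileManager
    try:
        session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                           name='[datastore1] devstack-image-cache_base',
                           datacenter=dc_ref,
                           createParentDirectories=True)
    except vexc.FileAlreadyExistsException:
        pass  # shared cache folder; an existing one is not an error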
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 783.662097] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4d0f857-39fa-464b-9a4c-3add647a2bbb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.669371] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 783.669371] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5203f0cc-8397-8559-f499-a439fcba3ada" [ 783.669371] env[68285]: _type = "Task" [ 783.669371] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.679466] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5203f0cc-8397-8559-f499-a439fcba3ada, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.767187] env[68285]: DEBUG nova.network.neutron [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.847479] env[68285]: DEBUG nova.compute.manager [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 783.953542] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.046783] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "refresh_cache-12fad42a-1011-4563-b11f-7b141b2a1670" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.046950] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquired lock "refresh_cache-12fad42a-1011-4563-b11f-7b141b2a1670" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.047175] env[68285]: DEBUG nova.network.neutron [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 784.048407] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891097, 'name': RemoveSnapshot_Task} progress is 78%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.098414] env[68285]: DEBUG oslo_concurrency.lockutils [req-c61d13f6-a1af-4c9d-ac6b-28430a5a1bc3 req-006a09bd-c503-4352-968a-aeca2c62d4b3 service nova] Releasing lock "refresh_cache-ec89a2a4-3bfc-45c5-b7f2-239b52995d6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.098414] env[68285]: DEBUG nova.compute.manager [req-c61d13f6-a1af-4c9d-ac6b-28430a5a1bc3 req-006a09bd-c503-4352-968a-aeca2c62d4b3 service nova] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Received event network-vif-deleted-9f5b021e-af4a-40de-ac20-e018f2923ae7 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 784.137627] env[68285]: DEBUG nova.network.neutron [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Updating instance_info_cache with network_info: [{"id": "d13fdc9f-ab41-435a-8bd4-080dbc090832", "address": "fa:16:3e:28:e0:f0", "network": {"id": "c4888d55-ced4-405f-af85-64f86b5a1859", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1192509281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef2539ec26f4cb5810e41ed80f04860", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd13fdc9f-ab", "ovs_interfaceid": "d13fdc9f-ab41-435a-8bd4-080dbc090832", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.193176] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5203f0cc-8397-8559-f499-a439fcba3ada, 'name': SearchDatastore_Task, 'duration_secs': 0.010951} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.194241] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ef5d7a2-5793-404d-a75d-c629856a5521 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.201561] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 784.201561] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52a0c520-9698-b4e0-74e9-30f73b5e706a" [ 784.201561] env[68285]: _type = "Task" [ 784.201561] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.216534] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a0c520-9698-b4e0-74e9-30f73b5e706a, 'name': SearchDatastore_Task, 'duration_secs': 0.011362} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.216866] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.217104] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 60144efd-061e-4144-9541-b2321c9b0ec1/60144efd-061e-4144-9541-b2321c9b0ec1.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 784.217373] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8999238a-dfe3-42eb-aa60-71ed363f9461 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.230521] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 784.230521] env[68285]: value = "task-2891098" [ 784.230521] env[68285]: _type = "Task" [ 784.230521] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.248563] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891098, 'name': CopyVirtualDisk_Task} progress is 0%. 
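The CopyVirtualDisk_Task invocation above copies the cached image VMDK into the new instance's directory (task-2891098, which completes a few entries later). A minimal sketch of the same call through oslo.vmware, with `session` and `dc_ref` assumed and the datastore paths taken from the log:

def copy_cached_image(session, dc_ref):
    vdisk_mgr = session.vim.service_content.virtualDiskManager
    src = ('[datastore1] devstack-image-cache_base/'
           'ce84ab4c-9913-42dc-b839-714ad2184867/'
           'ce84ab4c-9913-42dc-b839-714ad2184867.vmdk')
    dst = ('[datastore1] 60144efd-061e-4144-9541-b2321c9b0ec1/'
           '60144efd-061e-4144-9541-b2321c9b0ec1.vmdk')
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                              vdisk_mgr, sourceName=src,
                              sourceDatacenter=dc_ref, destName=dst,
                              destDatacenter=dc_ref)
    return session.wait_for_task(task)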
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.329606] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0329226-1a48-42fb-b9a7-18655cb093e1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.340295] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12bdf7f-d06e-4efb-ad37-163fe712b084 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.380490] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10592820-e148-433f-adc7-bb33eb68af2d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.399027] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2bc4a3-d11f-487f-9fac-451abf59299f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.422742] env[68285]: DEBUG nova.compute.provider_tree [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.542499] env[68285]: DEBUG oslo_vmware.api [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891097, 'name': RemoveSnapshot_Task, 'duration_secs': 0.771601} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.542824] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 784.543135] env[68285]: INFO nova.compute.manager [None req-16986546-f862-4570-aac7-0deef3d30ff8 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Took 19.27 seconds to snapshot the instance on the hypervisor. [ 784.644322] env[68285]: DEBUG nova.network.neutron [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.645807] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Releasing lock "refresh_cache-5b58896c-cb07-48c8-ace0-385486a3e19d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.646340] env[68285]: DEBUG nova.compute.manager [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Instance network_info: |[{"id": "d13fdc9f-ab41-435a-8bd4-080dbc090832", "address": "fa:16:3e:28:e0:f0", "network": {"id": "c4888d55-ced4-405f-af85-64f86b5a1859", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1192509281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef2539ec26f4cb5810e41ed80f04860", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd13fdc9f-ab", "ovs_interfaceid": "d13fdc9f-ab41-435a-8bd4-080dbc090832", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 784.647631] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:e0:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd13fdc9f-ab41-435a-8bd4-080dbc090832', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 784.660501] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Creating folder: Project (bef2539ec26f4cb5810e41ed80f04860). Parent ref: group-v580775. 
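The "Instance network_info" entry for 5b58896c above and the "Instance VIF info" list derived from it show the mapping the driver performs: the port's MAC and id are carried through, and the NSX logical-switch id from the port's binding details becomes an OpaqueNetwork reference. A self-contained illustration of that mapping (not Nova's build_virtual_machine code), using the values from the log:

network_info = [{
    "id": "d13fdc9f-ab41-435a-8bd4-080dbc090832",
    "address": "fa:16:3e:28:e0:f0",
    "network": {"id": "c4888d55-ced4-405f-af85-64f86b5a1859",
                "bridge": "br-int"},
    "details": {"nsx-logical-switch-id":
                "43ad01d2-c7dd-453c-a929-8ad76294d13c"},
}]

def to_vif_info(vifs):
    return [{
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": {"type": "OpaqueNetwork",
                        "network-id": vif["details"]["nsx-logical-switch-id"],
                        "network-type": "nsx.LogicalSwitch",
                        "use-external-id": True},
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",   # matches the vmxnet3 image used in this log
    } for vif in vifs]

print(to_vif_info(network_info))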
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 784.660841] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6f235aa-9458-466f-8658-6b08fa3bdbda {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.679731] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Created folder: Project (bef2539ec26f4cb5810e41ed80f04860) in parent group-v580775. [ 784.679731] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Creating folder: Instances. Parent ref: group-v580812. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 784.679731] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ca7053b-c48a-4090-816d-39d0577e9c5f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.692053] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Created folder: Instances in parent group-v580812. [ 784.692304] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 784.692514] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 784.692773] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1ad71f0-733d-4c89-9d0d-8b9ef35c661b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.714228] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 784.714228] env[68285]: value = "task-2891101" [ 784.714228] env[68285]: _type = "Task" [ 784.714228] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.724765] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891101, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.741911] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891098, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491936} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.742273] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 60144efd-061e-4144-9541-b2321c9b0ec1/60144efd-061e-4144-9541-b2321c9b0ec1.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 784.742552] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 784.742875] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5fa5ae48-195d-4213-b681-97ab7b640915 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.753026] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 784.753026] env[68285]: value = "task-2891102" [ 784.753026] env[68285]: _type = "Task" [ 784.753026] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.763564] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891102, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.890738] env[68285]: DEBUG nova.compute.manager [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Start spawning the instance on the hypervisor. 
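The "Extending root virtual disk to 1048576" entry above grows the copied root disk to the flavor's 1 GiB (1048576 KB) via VirtualDiskManager.ExtendVirtualDisk_Task. A minimal sketch of that call; `session` and `dc_ref` are assumed, and the path is the instance disk from the log:

def extend_root_disk(session, dc_ref):
    vdisk_mgr = session.vim.service_content.virtualDiskManager
    path = ('[datastore1] 60144efd-061e-4144-9541-b2321c9b0ec1/'
            '60144efd-061e-4144-9541-b2321c9b0ec1.vmdk')
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                              vdisk_mgr, name=path, datacenter=dc_ref,
                              newCapacityKb=1048576, eagerZero=False)
    return session.wait_for_task(task)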
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 784.930873] env[68285]: DEBUG nova.scheduler.client.report [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 784.937491] env[68285]: DEBUG nova.virt.hardware [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 784.937973] env[68285]: DEBUG nova.virt.hardware [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 784.938343] env[68285]: DEBUG nova.virt.hardware [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 784.938583] env[68285]: DEBUG nova.virt.hardware [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 784.939616] env[68285]: DEBUG nova.virt.hardware [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 784.939854] env[68285]: DEBUG nova.virt.hardware [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 784.940151] env[68285]: DEBUG nova.virt.hardware [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 784.940352] env[68285]: DEBUG nova.virt.hardware [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 784.940574] env[68285]: DEBUG nova.virt.hardware [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 784.940776] env[68285]: DEBUG nova.virt.hardware [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 784.940987] env[68285]: DEBUG nova.virt.hardware [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 784.942028] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204631b9-d77a-4548-85ae-a656578f6e11 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.955806] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4114f058-e9e2-427e-9392-cd9c25067fb7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.017776] env[68285]: DEBUG nova.network.neutron [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Updating instance_info_cache with network_info: [{"id": "ccc388b0-7423-4892-ac70-e4d86b1a0f17", "address": "fa:16:3e:55:5b:72", "network": {"id": "f7a43f6f-f31d-4b2c-a140-2b4d0375ff45", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1101832130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53c6c9c73f07454fbe69beeee428a15a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccc388b0-74", "ovs_interfaceid": "ccc388b0-7423-4892-ac70-e4d86b1a0f17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.033478] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.033671] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.229665] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891101, 'name': CreateVM_Task, 'duration_secs': 0.443544} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.230784] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 785.230784] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.230784] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.231180] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 785.231363] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77a9b9f5-4f1f-4fa6-8c70-0baef47a0e66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.240344] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 
tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Waiting for the task: (returnval){ [ 785.240344] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52dbf4f8-c89b-2a98-3b06-b3918ab26384" [ 785.240344] env[68285]: _type = "Task" [ 785.240344] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.251123] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52dbf4f8-c89b-2a98-3b06-b3918ab26384, 'name': SearchDatastore_Task, 'duration_secs': 0.010025} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.251824] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.251824] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 785.252149] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.252149] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.252298] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 785.256203] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94105458-2ca2-47ff-ba43-1c06eac2d751 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.264730] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891102, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090506} 
completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.266702] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 785.267196] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 785.267493] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 785.270181] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76ea29d-89c9-4edc-b407-22e43587e420 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.275691] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ed03f47-704d-4f91-8015-5817c7b45823 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.282269] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Waiting for the task: (returnval){ [ 785.282269] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523c3e90-d47f-79f2-8eda-b5adcb80a5e0" [ 785.282269] env[68285]: _type = "Task" [ 785.282269] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.305276] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 60144efd-061e-4144-9541-b2321c9b0ec1/60144efd-061e-4144-9541-b2321c9b0ec1.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 785.308214] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1be61391-d41f-41dd-a67f-b53b86b5611a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.339022] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523c3e90-d47f-79f2-8eda-b5adcb80a5e0, 'name': SearchDatastore_Task, 'duration_secs': 0.01071} completed successfully. 
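The nova.virt.hardware entries a few lines back ("Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerate CPU topologies whose sockets x cores x threads product equals the flavor's vCPU count, bounded by the 65536 defaults when neither flavor nor image sets limits. A self-contained sketch of that enumeration, not nova's implementation:

import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield (sockets, cores, threads)

print(list(possible_topologies(1)))   # [(1, 1, 1)], as in the log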
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.342584] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 785.342584] env[68285]: value = "task-2891103" [ 785.342584] env[68285]: _type = "Task" [ 785.342584] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.342792] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-520c37fc-327d-4591-a796-c7fd0e14566c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.362263] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Waiting for the task: (returnval){ [ 785.362263] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52846abd-0f09-3e5e-3412-0806d6b05dfc" [ 785.362263] env[68285]: _type = "Task" [ 785.362263] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.362529] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891103, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.372913] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52846abd-0f09-3e5e-3412-0806d6b05dfc, 'name': SearchDatastore_Task, 'duration_secs': 0.012677} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.373306] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.373426] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 5b58896c-cb07-48c8-ace0-385486a3e19d/5b58896c-cb07-48c8-ace0-385486a3e19d.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 785.373677] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aec7132d-c681-4cdb-aa0b-6e14e00c0e03 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.384016] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Waiting for the task: (returnval){ [ 785.384016] env[68285]: value = "task-2891104" [ 785.384016] env[68285]: _type = "Task" [ 785.384016] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.392972] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891104, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.435219] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.435570] env[68285]: DEBUG nova.compute.manager [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 785.441689] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.755s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.446022] env[68285]: INFO nova.compute.claims [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 785.520958] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Releasing lock "refresh_cache-12fad42a-1011-4563-b11f-7b141b2a1670" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.521465] env[68285]: DEBUG nova.compute.manager [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Instance network_info: |[{"id": "ccc388b0-7423-4892-ac70-e4d86b1a0f17", "address": "fa:16:3e:55:5b:72", "network": {"id": "f7a43f6f-f31d-4b2c-a140-2b4d0375ff45", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1101832130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53c6c9c73f07454fbe69beeee428a15a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccc388b0-74", "ovs_interfaceid": "ccc388b0-7423-4892-ac70-e4d86b1a0f17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 785.522130] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:5b:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4d3f69a-b086-4c3b-b976-5a848b63dfc4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ccc388b0-7423-4892-ac70-e4d86b1a0f17', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 785.532407] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Creating folder: Project (53c6c9c73f07454fbe69beeee428a15a). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 785.533132] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e41d0212-fb5b-4afd-a5e3-ab89c729576e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.546743] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Created folder: Project (53c6c9c73f07454fbe69beeee428a15a) in parent group-v580775. [ 785.546743] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Creating folder: Instances. Parent ref: group-v580815. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 785.547271] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99a0c3f9-a89a-48e0-a52d-52f83840f9fa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.565972] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Created folder: Instances in parent group-v580815. [ 785.566163] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 785.566407] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 785.566567] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72a6160f-3f8b-4063-b166-d80a0b45cfda {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.600377] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 785.600377] env[68285]: value = "task-2891107" [ 785.600377] env[68285]: _type = "Task" [ 785.600377] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.620342] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891107, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.637237] env[68285]: DEBUG nova.network.neutron [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Successfully created port: 4dda7e58-86f1-4d41-ad9e-0f08c3df3241 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.864823] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891103, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.898769] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891104, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.948899] env[68285]: DEBUG nova.compute.utils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 785.950970] env[68285]: DEBUG nova.compute.manager [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 785.950970] env[68285]: DEBUG nova.network.neutron [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 786.040223] env[68285]: DEBUG nova.policy [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ef9a5eab3094ae1aed693a32c43f2bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed60286b54da48c7b47b92c6058cb1d4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 786.113331] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891107, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.340991] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.341362] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.364141] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891103, 'name': ReconfigVM_Task, 'duration_secs': 0.561853} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.364141] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 60144efd-061e-4144-9541-b2321c9b0ec1/60144efd-061e-4144-9541-b2321c9b0ec1.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 786.364141] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-118b6ca7-7224-4b71-8893-f3821a5780ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.371554] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 786.371554] env[68285]: value = "task-2891108" [ 786.371554] env[68285]: _type = "Task" [ 786.371554] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.379976] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891108, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.397069] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891104, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594718} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.397069] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 5b58896c-cb07-48c8-ace0-385486a3e19d/5b58896c-cb07-48c8-ace0-385486a3e19d.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 786.397069] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 786.397069] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8bfd294e-4e83-46f5-92f7-879be7df85b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.404218] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Waiting for the task: (returnval){ [ 786.404218] env[68285]: value = "task-2891109" [ 786.404218] env[68285]: _type = "Task" [ 786.404218] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.417274] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891109, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.460878] env[68285]: DEBUG nova.compute.manager [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 786.611446] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891107, 'name': CreateVM_Task, 'duration_secs': 0.712684} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.612264] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 786.613164] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.616026] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.616026] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 786.616026] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d78d6ae3-172d-44d0-a5d9-1f034ab2248b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.620506] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 786.620506] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52cd14bd-becc-7432-7987-e681fdb615c7" [ 786.620506] env[68285]: _type = "Task" [ 786.620506] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.634270] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52cd14bd-becc-7432-7987-e681fdb615c7, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.634797] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.635101] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.635369] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.635536] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.635698] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 786.635957] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6a4b0bf-065b-48d7-8aca-afefed93a912 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.650713] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 786.650713] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 786.654021] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c097e664-e0b8-4dd4-b4d3-d2c39c8f3d1b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.658868] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 786.658868] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d8b69c-d05e-8182-bd18-8339a924fd21" [ 786.658868] env[68285]: _type = "Task" [ 786.658868] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.676711] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d8b69c-d05e-8182-bd18-8339a924fd21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.748150] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquiring lock "95f5e902-6385-4602-8458-7d7b2069a9da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.748150] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lock "95f5e902-6385-4602-8458-7d7b2069a9da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.756033] env[68285]: DEBUG nova.network.neutron [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Successfully created port: d11df654-f231-443e-aa54-91844bb26c2f {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 786.891090] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891108, 'name': Rename_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.918954] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891109, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071707} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.918954] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 786.920562] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2c19f0-603b-4153-b286-375441c5500d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.958136] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 5b58896c-cb07-48c8-ace0-385486a3e19d/5b58896c-cb07-48c8-ace0-385486a3e19d.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 786.962232] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6930e2c-9054-4f47-a787-b6e8fc355a85 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.987296] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Waiting for the task: (returnval){ [ 786.987296] env[68285]: value = "task-2891110" [ 786.987296] env[68285]: _type = "Task" [ 786.987296] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.002456] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891110, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.003741] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64519a06-b1e0-42bc-9cb7-b57177cda55c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.013160] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8dd270d-9c04-4088-bb02-03d28e06ed58 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.047163] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11729bf2-e85b-4f6a-91ad-71c11af04b9e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.056199] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a6e79e-65d6-4c7b-9391-96249f5a59fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.073298] env[68285]: DEBUG nova.compute.provider_tree [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.171501] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d8b69c-d05e-8182-bd18-8339a924fd21, 'name': SearchDatastore_Task, 'duration_secs': 0.023077} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.172335] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f32e63e5-9918-444e-8700-8b7cc635649c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.179328] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 787.179328] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5207a0f1-5507-be98-d219-7e51d7d0f1c0" [ 787.179328] env[68285]: _type = "Task" [ 787.179328] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.188757] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5207a0f1-5507-be98-d219-7e51d7d0f1c0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.384983] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891108, 'name': Rename_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.482762] env[68285]: DEBUG nova.compute.manager [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 787.506823] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891110, 'name': ReconfigVM_Task, 'duration_secs': 0.30834} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.510371] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 5b58896c-cb07-48c8-ace0-385486a3e19d/5b58896c-cb07-48c8-ace0-385486a3e19d.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 787.511294] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-15a167ed-4c29-4a8f-b920-05c27c2e8ccf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.520467] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Waiting for the task: (returnval){ [ 787.520467] env[68285]: value = "task-2891111" [ 787.520467] env[68285]: _type = "Task" [ 787.520467] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.528352] env[68285]: DEBUG nova.virt.hardware [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 787.529164] env[68285]: DEBUG nova.virt.hardware [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 787.529345] env[68285]: DEBUG nova.virt.hardware [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 787.529683] env[68285]: DEBUG nova.virt.hardware [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 787.529763] env[68285]: DEBUG nova.virt.hardware [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 787.529900] env[68285]: DEBUG nova.virt.hardware [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 787.530120] env[68285]: DEBUG nova.virt.hardware [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 787.530276] env[68285]: DEBUG nova.virt.hardware [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 
tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 787.530433] env[68285]: DEBUG nova.virt.hardware [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 787.530584] env[68285]: DEBUG nova.virt.hardware [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 787.530762] env[68285]: DEBUG nova.virt.hardware [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 787.532954] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3b4cea-43e9-4ef1-8fb7-1ecf4b806cff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.549792] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891111, 'name': Rename_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.551300] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5624c9-2e6f-4e6e-9717-b552f2405066 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.576910] env[68285]: DEBUG nova.scheduler.client.report [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 787.694051] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5207a0f1-5507-be98-d219-7e51d7d0f1c0, 'name': SearchDatastore_Task, 'duration_secs': 0.010377} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.694268] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.694559] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 12fad42a-1011-4563-b11f-7b141b2a1670/12fad42a-1011-4563-b11f-7b141b2a1670.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 787.694836] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21e06c73-58a3-418f-8dbd-349708d18d1c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.702935] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 787.702935] env[68285]: value = "task-2891112" [ 787.702935] env[68285]: _type = "Task" [ 787.702935] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.713165] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891112, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.884199] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891108, 'name': Rename_Task, 'duration_secs': 1.439988} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.884496] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 787.884804] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23a085d4-063d-4e7a-96e5-061842146fed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.892698] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 787.892698] env[68285]: value = "task-2891113" [ 787.892698] env[68285]: _type = "Task" [ 787.892698] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.904096] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891113, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.908037] env[68285]: DEBUG nova.compute.manager [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Received event network-changed-ae1e3da0-addf-4feb-83f8-8a52e6a74a39 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 787.908461] env[68285]: DEBUG nova.compute.manager [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Refreshing instance network info cache due to event network-changed-ae1e3da0-addf-4feb-83f8-8a52e6a74a39. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 787.908696] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Acquiring lock "refresh_cache-52fbfbe4-1807-4d6d-9139-ebe30e6bf647" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.908840] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Acquired lock "refresh_cache-52fbfbe4-1807-4d6d-9139-ebe30e6bf647" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.909010] env[68285]: DEBUG nova.network.neutron [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Refreshing network info cache for port ae1e3da0-addf-4feb-83f8-8a52e6a74a39 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 788.036157] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891111, 'name': Rename_Task, 'duration_secs': 0.150212} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.038972] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 788.038972] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45f48cbb-7eb8-4c87-bc65-5daa53f4ccac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.047702] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Waiting for the task: (returnval){ [ 788.047702] env[68285]: value = "task-2891114" [ 788.047702] env[68285]: _type = "Task" [ 788.047702] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.058858] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891114, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.086592] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.087314] env[68285]: DEBUG nova.compute.manager [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 788.091141] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.659s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.092539] env[68285]: INFO nova.compute.claims [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 788.165449] env[68285]: DEBUG nova.compute.manager [req-e69dbfa2-4165-4c80-a74f-62e49ff9d971 req-2c480fc0-d454-4a19-9dd6-aea5d165ee7d service nova] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Received event network-changed-23fd8792-3c59-451c-9424-1043ad4846a0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 788.165651] env[68285]: DEBUG nova.compute.manager [req-e69dbfa2-4165-4c80-a74f-62e49ff9d971 req-2c480fc0-d454-4a19-9dd6-aea5d165ee7d service nova] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Refreshing instance network info cache due to event network-changed-23fd8792-3c59-451c-9424-1043ad4846a0. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 788.165866] env[68285]: DEBUG oslo_concurrency.lockutils [req-e69dbfa2-4165-4c80-a74f-62e49ff9d971 req-2c480fc0-d454-4a19-9dd6-aea5d165ee7d service nova] Acquiring lock "refresh_cache-60144efd-061e-4144-9541-b2321c9b0ec1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.166068] env[68285]: DEBUG oslo_concurrency.lockutils [req-e69dbfa2-4165-4c80-a74f-62e49ff9d971 req-2c480fc0-d454-4a19-9dd6-aea5d165ee7d service nova] Acquired lock "refresh_cache-60144efd-061e-4144-9541-b2321c9b0ec1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.166525] env[68285]: DEBUG nova.network.neutron [req-e69dbfa2-4165-4c80-a74f-62e49ff9d971 req-2c480fc0-d454-4a19-9dd6-aea5d165ee7d service nova] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Refreshing network info cache for port 23fd8792-3c59-451c-9424-1043ad4846a0 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 788.215495] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891112, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498642} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.215832] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 12fad42a-1011-4563-b11f-7b141b2a1670/12fad42a-1011-4563-b11f-7b141b2a1670.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 788.216144] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 788.216448] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-14cb5fc1-54b6-485b-a7bb-e101932be256 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.224705] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 788.224705] env[68285]: value = "task-2891115" [ 788.224705] env[68285]: _type = "Task" [ 788.224705] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.233247] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891115, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.407416] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891113, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.524517] env[68285]: DEBUG nova.network.neutron [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Successfully updated port: 4dda7e58-86f1-4d41-ad9e-0f08c3df3241 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 788.565311] env[68285]: DEBUG oslo_vmware.api [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891114, 'name': PowerOnVM_Task, 'duration_secs': 0.516844} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.565759] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 788.566105] env[68285]: INFO nova.compute.manager [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Took 10.23 seconds to spawn the instance on the hypervisor. [ 788.566393] env[68285]: DEBUG nova.compute.manager [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 788.568149] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d269f6ef-5106-4aa7-a9d9-c0ef4cdcca29 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.598694] env[68285]: DEBUG nova.compute.utils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 788.603310] env[68285]: DEBUG nova.compute.manager [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 788.603663] env[68285]: DEBUG nova.network.neutron [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 788.718666] env[68285]: DEBUG nova.policy [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb7f978e7fa64e88af5756fca97fce6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4471597d3345443aa28b97acd91847e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 788.740312] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891115, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07093} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.741412] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.744273] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7611cf6-e390-4a5a-b70b-b7a6aff167a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.769276] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 12fad42a-1011-4563-b11f-7b141b2a1670/12fad42a-1011-4563-b11f-7b141b2a1670.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 788.770341] env[68285]: DEBUG nova.network.neutron [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Updated VIF entry in instance network info cache for port ae1e3da0-addf-4feb-83f8-8a52e6a74a39. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 788.770903] env[68285]: DEBUG nova.network.neutron [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Updating instance_info_cache with network_info: [{"id": "ae1e3da0-addf-4feb-83f8-8a52e6a74a39", "address": "fa:16:3e:94:fb:43", "network": {"id": "d9bb1a73-a8c2-4023-87f2-76bdb79f714a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-108279850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb8e48ceae0748b0b8c762ab7303a4b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae1e3da0-ad", "ovs_interfaceid": "ae1e3da0-addf-4feb-83f8-8a52e6a74a39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.774524] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bfcdfe1-a547-4200-845c-a75ec6d50346 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.798990] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 788.798990] env[68285]: value = "task-2891116" [ 788.798990] env[68285]: _type = "Task" [ 788.798990] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.809799] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891116, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.886243] env[68285]: DEBUG nova.network.neutron [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Successfully updated port: d11df654-f231-443e-aa54-91844bb26c2f {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 788.912043] env[68285]: DEBUG oslo_vmware.api [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891113, 'name': PowerOnVM_Task, 'duration_secs': 0.59693} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.912332] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 788.912590] env[68285]: INFO nova.compute.manager [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Took 13.12 seconds to spawn the instance on the hypervisor. [ 788.916135] env[68285]: DEBUG nova.compute.manager [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 788.917166] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d887ed0-b798-4c8b-9d1b-054d233d374c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.029810] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquiring lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.029810] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquired lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.029810] env[68285]: DEBUG nova.network.neutron [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.091144] env[68285]: INFO nova.compute.manager [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Took 29.25 seconds to build instance. [ 789.105317] env[68285]: DEBUG nova.compute.manager [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 789.294522] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Releasing lock "refresh_cache-52fbfbe4-1807-4d6d-9139-ebe30e6bf647" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.294779] env[68285]: DEBUG nova.compute.manager [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Received event network-vif-plugged-d13fdc9f-ab41-435a-8bd4-080dbc090832 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 789.294971] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Acquiring lock "5b58896c-cb07-48c8-ace0-385486a3e19d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.295203] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Lock "5b58896c-cb07-48c8-ace0-385486a3e19d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.295364] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Lock "5b58896c-cb07-48c8-ace0-385486a3e19d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.295528] env[68285]: DEBUG nova.compute.manager [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] No waiting events found dispatching network-vif-plugged-d13fdc9f-ab41-435a-8bd4-080dbc090832 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 789.295691] env[68285]: WARNING nova.compute.manager [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Received unexpected event network-vif-plugged-d13fdc9f-ab41-435a-8bd4-080dbc090832 for instance with vm_state building and task_state spawning. [ 789.295851] env[68285]: DEBUG nova.compute.manager [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Received event network-changed-d13fdc9f-ab41-435a-8bd4-080dbc090832 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 789.296028] env[68285]: DEBUG nova.compute.manager [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Refreshing instance network info cache due to event network-changed-d13fdc9f-ab41-435a-8bd4-080dbc090832. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 789.296189] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Acquiring lock "refresh_cache-5b58896c-cb07-48c8-ace0-385486a3e19d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.296322] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Acquired lock "refresh_cache-5b58896c-cb07-48c8-ace0-385486a3e19d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.296476] env[68285]: DEBUG nova.network.neutron [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Refreshing network info cache for port d13fdc9f-ab41-435a-8bd4-080dbc090832 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 789.309802] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891116, 'name': ReconfigVM_Task, 'duration_secs': 0.298211} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.310318] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 12fad42a-1011-4563-b11f-7b141b2a1670/12fad42a-1011-4563-b11f-7b141b2a1670.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 789.314500] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6987a53-996e-45c5-aadd-6d2fb36e8f37 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.322820] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 789.322820] env[68285]: value = "task-2891117" [ 789.322820] env[68285]: _type = "Task" [ 789.322820] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.335145] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891117, 'name': Rename_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.389988] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Acquiring lock "refresh_cache-9f4b2b94-ec19-4a8e-8663-ab71c417d093" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.389988] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Acquired lock "refresh_cache-9f4b2b94-ec19-4a8e-8663-ab71c417d093" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.390184] env[68285]: DEBUG nova.network.neutron [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.418144] env[68285]: DEBUG nova.network.neutron [req-e69dbfa2-4165-4c80-a74f-62e49ff9d971 req-2c480fc0-d454-4a19-9dd6-aea5d165ee7d service nova] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Updated VIF entry in instance network info cache for port 23fd8792-3c59-451c-9424-1043ad4846a0. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 789.418144] env[68285]: DEBUG nova.network.neutron [req-e69dbfa2-4165-4c80-a74f-62e49ff9d971 req-2c480fc0-d454-4a19-9dd6-aea5d165ee7d service nova] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Updating instance_info_cache with network_info: [{"id": "23fd8792-3c59-451c-9424-1043ad4846a0", "address": "fa:16:3e:bb:f1:26", "network": {"id": "d7225652-1f39-4108-a3ec-2fe16c2f3612", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-725430316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d03ce152e74cec8910b12d34ad8ba6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23fd8792-3c", "ovs_interfaceid": "23fd8792-3c59-451c-9424-1043ad4846a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.437460] env[68285]: INFO nova.compute.manager [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Took 29.68 seconds to build instance. 
[ 789.591411] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a48815a6-cacb-4c5c-89cd-c41c7847b007 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.596518] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d4fd696-b029-43b8-98c5-f5e4ddb633b9 tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Lock "5b58896c-cb07-48c8-ace0-385486a3e19d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.302s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.604711] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8478234-2b8f-4f83-907d-c130e64e3607 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.644211] env[68285]: DEBUG nova.network.neutron [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.646863] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404ce271-cc78-4a7d-8c7a-c1932bfb1a7c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.656131] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb06483-80e7-4749-a5e3-ad6b8469a92f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.672761] env[68285]: DEBUG nova.compute.provider_tree [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.742576] env[68285]: DEBUG nova.network.neutron [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Successfully created port: 655ee17d-c9b8-43d9-b783-8c0a559a8300 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 789.784514] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "7dca07f4-78aa-45e4-954a-c9f4d58e7c84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.784944] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "7dca07f4-78aa-45e4-954a-c9f4d58e7c84" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.833616] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891117, 'name': Rename_Task, 'duration_secs': 0.149378} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.833927] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 789.834180] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5aba5706-05d6-43c3-a712-ed911b9065bb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.841728] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 789.841728] env[68285]: value = "task-2891118" [ 789.841728] env[68285]: _type = "Task" [ 789.841728] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.854962] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891118, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.921488] env[68285]: DEBUG oslo_concurrency.lockutils [req-e69dbfa2-4165-4c80-a74f-62e49ff9d971 req-2c480fc0-d454-4a19-9dd6-aea5d165ee7d service nova] Releasing lock "refresh_cache-60144efd-061e-4144-9541-b2321c9b0ec1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.921787] env[68285]: DEBUG nova.compute.manager [req-e69dbfa2-4165-4c80-a74f-62e49ff9d971 req-2c480fc0-d454-4a19-9dd6-aea5d165ee7d service nova] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Received event network-vif-deleted-3e9ecf12-c47b-42e6-8dcf-0963075951af {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 789.952982] env[68285]: DEBUG oslo_concurrency.lockutils [None req-868bd663-0697-4fde-92e4-bce5e6cc79bf tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "60144efd-061e-4144-9541-b2321c9b0ec1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.551s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.969022] env[68285]: DEBUG nova.network.neutron [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.023420] env[68285]: DEBUG nova.network.neutron [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Updating instance_info_cache with network_info: [{"id": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "address": "fa:16:3e:4e:1c:f7", "network": {"id": "e5bd99f4-35a7-4389-ba74-8ae60f642ef1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-161057880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee59d7c8bf9d4e35b0c2e1861f375a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dda7e58-86", "ovs_interfaceid": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.102881] env[68285]: DEBUG nova.compute.manager [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 790.156016] env[68285]: DEBUG nova.compute.manager [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 790.181643] env[68285]: DEBUG nova.scheduler.client.report [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 790.195104] env[68285]: DEBUG nova.virt.hardware [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 790.195373] env[68285]: DEBUG nova.virt.hardware [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 790.195528] env[68285]: DEBUG nova.virt.hardware [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 790.195714] env[68285]: DEBUG nova.virt.hardware [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 790.195855] env[68285]: DEBUG nova.virt.hardware [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 790.195994] 
env[68285]: DEBUG nova.virt.hardware [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 790.200392] env[68285]: DEBUG nova.virt.hardware [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 790.200392] env[68285]: DEBUG nova.virt.hardware [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 790.200392] env[68285]: DEBUG nova.virt.hardware [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 790.200392] env[68285]: DEBUG nova.virt.hardware [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 790.200392] env[68285]: DEBUG nova.virt.hardware [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 790.200763] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af20270-859e-45c2-af33-dc1eeb684f20 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.219861] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c404bef7-d0ec-4406-8b3d-8e376e60ceb5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.288671] env[68285]: DEBUG nova.network.neutron [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Updated VIF entry in instance network info cache for port d13fdc9f-ab41-435a-8bd4-080dbc090832. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 790.289188] env[68285]: DEBUG nova.network.neutron [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Updating instance_info_cache with network_info: [{"id": "d13fdc9f-ab41-435a-8bd4-080dbc090832", "address": "fa:16:3e:28:e0:f0", "network": {"id": "c4888d55-ced4-405f-af85-64f86b5a1859", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1192509281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef2539ec26f4cb5810e41ed80f04860", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd13fdc9f-ab", "ovs_interfaceid": "d13fdc9f-ab41-435a-8bd4-080dbc090832", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.291314] env[68285]: DEBUG nova.network.neutron [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Updating instance_info_cache with network_info: [{"id": "d11df654-f231-443e-aa54-91844bb26c2f", "address": "fa:16:3e:75:eb:32", "network": {"id": "f3886ae5-9380-4597-a7bd-9f464be650ef", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1859029152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed60286b54da48c7b47b92c6058cb1d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd11df654-f2", "ovs_interfaceid": "d11df654-f231-443e-aa54-91844bb26c2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.353732] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891118, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.402445] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Acquiring lock "c8784827-a928-439d-abdf-d82b62a61152" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.402754] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Lock "c8784827-a928-439d-abdf-d82b62a61152" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.458346] env[68285]: DEBUG nova.compute.manager [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 790.526582] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Releasing lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.526967] env[68285]: DEBUG nova.compute.manager [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Instance network_info: |[{"id": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "address": "fa:16:3e:4e:1c:f7", "network": {"id": "e5bd99f4-35a7-4389-ba74-8ae60f642ef1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-161057880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee59d7c8bf9d4e35b0c2e1861f375a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dda7e58-86", "ovs_interfaceid": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 790.527389] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 
tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:1c:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96fdbb91-eb49-4dbf-b234-5b38503d7589', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4dda7e58-86f1-4d41-ad9e-0f08c3df3241', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 790.536540] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Creating folder: Project (ee59d7c8bf9d4e35b0c2e1861f375a1e). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 790.537275] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-973d7fcc-5c81-497d-8a18-04ab3aa22a5f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.549035] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Created folder: Project (ee59d7c8bf9d4e35b0c2e1861f375a1e) in parent group-v580775. [ 790.549562] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Creating folder: Instances. Parent ref: group-v580818. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 790.549562] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c6dd4fe-b401-491c-9d8f-eb0caa7de688 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.559192] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Created folder: Instances in parent group-v580818. [ 790.559432] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 790.559731] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 790.559898] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd49ee36-8fb6-43ae-8446-81a3b3dacbe6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.581843] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 790.581843] env[68285]: value = "task-2891121" [ 790.581843] env[68285]: _type = "Task" [ 790.581843] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.590617] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891121, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.638728] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.687861] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.597s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.688391] env[68285]: DEBUG nova.compute.manager [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 790.690895] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 15.619s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.793831] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Releasing lock "refresh_cache-5b58896c-cb07-48c8-ace0-385486a3e19d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.794216] env[68285]: DEBUG nova.compute.manager [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Received event network-vif-plugged-ccc388b0-7423-4892-ac70-e4d86b1a0f17 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 790.794423] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Acquiring lock "12fad42a-1011-4563-b11f-7b141b2a1670-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.794636] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Lock "12fad42a-1011-4563-b11f-7b141b2a1670-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.794789] env[68285]: DEBUG oslo_concurrency.lockutils 
[req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Lock "12fad42a-1011-4563-b11f-7b141b2a1670-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.794954] env[68285]: DEBUG nova.compute.manager [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] No waiting events found dispatching network-vif-plugged-ccc388b0-7423-4892-ac70-e4d86b1a0f17 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 790.795235] env[68285]: WARNING nova.compute.manager [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Received unexpected event network-vif-plugged-ccc388b0-7423-4892-ac70-e4d86b1a0f17 for instance with vm_state building and task_state spawning. [ 790.795413] env[68285]: DEBUG nova.compute.manager [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Received event network-changed-ccc388b0-7423-4892-ac70-e4d86b1a0f17 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 790.795584] env[68285]: DEBUG nova.compute.manager [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Refreshing instance network info cache due to event network-changed-ccc388b0-7423-4892-ac70-e4d86b1a0f17. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 790.795770] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Acquiring lock "refresh_cache-12fad42a-1011-4563-b11f-7b141b2a1670" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.795940] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Acquired lock "refresh_cache-12fad42a-1011-4563-b11f-7b141b2a1670" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.796123] env[68285]: DEBUG nova.network.neutron [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Refreshing network info cache for port ccc388b0-7423-4892-ac70-e4d86b1a0f17 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 790.800544] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Releasing lock "refresh_cache-9f4b2b94-ec19-4a8e-8663-ab71c417d093" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.800544] env[68285]: DEBUG nova.compute.manager [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Instance network_info: |[{"id": 
"d11df654-f231-443e-aa54-91844bb26c2f", "address": "fa:16:3e:75:eb:32", "network": {"id": "f3886ae5-9380-4597-a7bd-9f464be650ef", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1859029152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed60286b54da48c7b47b92c6058cb1d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd11df654-f2", "ovs_interfaceid": "d11df654-f231-443e-aa54-91844bb26c2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 790.800741] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:eb:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4712af2-45ef-4652-8d2c-482ec70056d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd11df654-f231-443e-aa54-91844bb26c2f', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 790.810046] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Creating folder: Project (ed60286b54da48c7b47b92c6058cb1d4). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 790.811355] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ebb3d6e0-5e25-450f-8135-4b63084d123e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.823978] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Created folder: Project (ed60286b54da48c7b47b92c6058cb1d4) in parent group-v580775. [ 790.824206] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Creating folder: Instances. Parent ref: group-v580821. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 790.824449] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0dccd9a-0031-40fa-9b36-e42fcf74b7dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.836242] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Created folder: Instances in parent group-v580821. [ 790.836490] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 790.836695] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 790.836916] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02ea2725-0e6b-437d-bfc7-11e3e4a031ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.863364] env[68285]: DEBUG oslo_vmware.api [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891118, 'name': PowerOnVM_Task, 'duration_secs': 0.53086} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.865627] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 790.866193] env[68285]: INFO nova.compute.manager [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Took 10.51 seconds to spawn the instance on the hypervisor. [ 790.866193] env[68285]: DEBUG nova.compute.manager [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 790.866306] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 790.866306] env[68285]: value = "task-2891124" [ 790.866306] env[68285]: _type = "Task" [ 790.866306] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.866932] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f655084-114b-456a-b220-a1d3dda902cd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.882627] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891124, 'name': CreateVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.985327] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.092433] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891121, 'name': CreateVM_Task, 'duration_secs': 0.49996} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.092659] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 791.093440] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.093675] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.094056] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 791.094318] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f12583aa-8e13-4550-a94f-87c66efb1944 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.098946] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Waiting for the task: (returnval){ [ 791.098946] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b0c567-58b2-217d-04f0-73933d9f9a43" [ 791.098946] env[68285]: _type = "Task" [ 791.098946] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.111475] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b0c567-58b2-217d-04f0-73933d9f9a43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.194581] env[68285]: DEBUG nova.compute.utils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 791.200376] env[68285]: INFO nova.compute.claims [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 791.206353] env[68285]: DEBUG nova.compute.manager [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 791.206899] env[68285]: DEBUG nova.network.neutron [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 791.279234] env[68285]: DEBUG nova.policy [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '712fb26143084c72a09ca405f7f44467', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0df6f9cd11e4cbea0a5d25e546ade05', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 791.380998] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891124, 'name': CreateVM_Task, 'duration_secs': 0.43664} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.380998] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 791.381180] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.391982] env[68285]: INFO nova.compute.manager [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Took 30.27 seconds to build instance. [ 791.611412] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b0c567-58b2-217d-04f0-73933d9f9a43, 'name': SearchDatastore_Task, 'duration_secs': 0.022875} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.614703] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.616677] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 791.616677] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.616807] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.616958] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 791.617910] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.618964] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 791.618964] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3093fa6e-55a0-44d7-8681-c46513c1e7f5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.622061] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9059225a-23b8-410f-94da-6b7f1babeef0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.627611] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Waiting for the task: (returnval){ [ 791.627611] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5206ff65-f490-7e5f-ec4f-c5f8f831279b" [ 791.627611] env[68285]: _type = "Task" [ 791.627611] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.632860] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 791.633052] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 791.636809] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76df6c8c-440a-458c-a4e9-bea0b402d2f2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.639521] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5206ff65-f490-7e5f-ec4f-c5f8f831279b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.642357] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Waiting for the task: (returnval){ [ 791.642357] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d616cd-905f-99ab-cccc-a2637c3e0840" [ 791.642357] env[68285]: _type = "Task" [ 791.642357] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.651328] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d616cd-905f-99ab-cccc-a2637c3e0840, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.708286] env[68285]: INFO nova.compute.resource_tracker [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating resource usage from migration ee2c2826-d8be-4236-8069-9c4a38957ca5 [ 791.713220] env[68285]: DEBUG nova.compute.manager [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 791.865473] env[68285]: DEBUG nova.network.neutron [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Updated VIF entry in instance network info cache for port ccc388b0-7423-4892-ac70-e4d86b1a0f17. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 791.865855] env[68285]: DEBUG nova.network.neutron [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Updating instance_info_cache with network_info: [{"id": "ccc388b0-7423-4892-ac70-e4d86b1a0f17", "address": "fa:16:3e:55:5b:72", "network": {"id": "f7a43f6f-f31d-4b2c-a140-2b4d0375ff45", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1101832130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53c6c9c73f07454fbe69beeee428a15a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccc388b0-74", "ovs_interfaceid": "ccc388b0-7423-4892-ac70-e4d86b1a0f17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.879453] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Acquiring lock "65f289bb-6e97-47ad-8531-c06a9cce302f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.879453] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Lock "65f289bb-6e97-47ad-8531-c06a9cce302f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.896285] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e000f485-a65e-4d35-ac57-48c518b22413 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "12fad42a-1011-4563-b11f-7b141b2a1670" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.783s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.918178] env[68285]: DEBUG nova.network.neutron [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Successfully created port: 03219bf0-d5df-4a05-8632-cb282cf3fa2e {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 791.974374] env[68285]: DEBUG nova.network.neutron 
[None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Successfully updated port: 655ee17d-c9b8-43d9-b783-8c0a559a8300 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 792.138075] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5206ff65-f490-7e5f-ec4f-c5f8f831279b, 'name': SearchDatastore_Task, 'duration_secs': 0.033409} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.138579] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.138579] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 792.138786] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.153577] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d616cd-905f-99ab-cccc-a2637c3e0840, 'name': SearchDatastore_Task, 'duration_secs': 0.025975} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.156221] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3f52e65-8685-4424-9afd-b97d090bb935 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.159357] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623dc4ce-f99b-4b7f-9508-801981bfbe92 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.164651] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Waiting for the task: (returnval){ [ 792.164651] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524c4421-1e56-f107-148f-16d8689ce4ae" [ 792.164651] env[68285]: _type = "Task" [ 792.164651] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.170346] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda04329-c6e4-41c6-b713-1c6994b0a987 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.177356] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524c4421-1e56-f107-148f-16d8689ce4ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.208389] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a96c90-3719-439a-8270-0893b357969a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.216603] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125e1ea2-f3aa-4657-9107-a84b7055d11c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.235040] env[68285]: DEBUG nova.compute.provider_tree [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.369088] env[68285]: DEBUG nova.compute.manager [req-d72ecdbc-739b-4c0a-a370-26d97eb09ef7 req-c92a16ed-dad2-45f1-9084-f4da81a2e80b service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Received event network-vif-plugged-d11df654-f231-443e-aa54-91844bb26c2f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 792.369135] env[68285]: DEBUG oslo_concurrency.lockutils [req-d72ecdbc-739b-4c0a-a370-26d97eb09ef7 req-c92a16ed-dad2-45f1-9084-f4da81a2e80b service nova] Acquiring lock "9f4b2b94-ec19-4a8e-8663-ab71c417d093-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.369325] env[68285]: DEBUG oslo_concurrency.lockutils [req-d72ecdbc-739b-4c0a-a370-26d97eb09ef7 req-c92a16ed-dad2-45f1-9084-f4da81a2e80b service nova] Lock "9f4b2b94-ec19-4a8e-8663-ab71c417d093-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.369486] env[68285]: DEBUG oslo_concurrency.lockutils [req-d72ecdbc-739b-4c0a-a370-26d97eb09ef7 req-c92a16ed-dad2-45f1-9084-f4da81a2e80b service nova] Lock "9f4b2b94-ec19-4a8e-8663-ab71c417d093-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.369647] env[68285]: DEBUG nova.compute.manager [req-d72ecdbc-739b-4c0a-a370-26d97eb09ef7 req-c92a16ed-dad2-45f1-9084-f4da81a2e80b service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] No waiting events found dispatching network-vif-plugged-d11df654-f231-443e-aa54-91844bb26c2f {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 792.369792] env[68285]: WARNING nova.compute.manager [req-d72ecdbc-739b-4c0a-a370-26d97eb09ef7 req-c92a16ed-dad2-45f1-9084-f4da81a2e80b service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Received unexpected event network-vif-plugged-d11df654-f231-443e-aa54-91844bb26c2f for instance with vm_state building and task_state spawning. 
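The repeated "Acquiring lock" / "Acquired lock" / '"released"' records above all come from oslo.concurrency's lockutils helpers. A minimal sketch of the two call patterns that produce them, reusing lock names copied from the log; the placeholder bodies are illustrative only, not Nova code:

    from oslo_concurrency import lockutils

    # Context-manager form: emits the "Acquiring lock" / "Acquired lock" /
    # "Releasing lock" DEBUG lines (lockutils.py:313/316/334 in the log).
    with lockutils.lock("refresh_cache-12fad42a-1011-4563-b11f-7b141b2a1670"):
        pass  # e.g. refresh the instance network info cache while the lock is held

    # Decorator form: emits the 'acquired by ... :: waited' and
    # '"released" by ... :: held' DEBUG lines (lockutils.py:405/410/424 in the log).
    @lockutils.synchronized("12fad42a-1011-4563-b11f-7b141b2a1670-events")
    def _pop_event():
        pass  # runs only while the per-instance "-events" lock is held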
[ 792.370034] env[68285]: DEBUG nova.compute.manager [req-d72ecdbc-739b-4c0a-a370-26d97eb09ef7 req-c92a16ed-dad2-45f1-9084-f4da81a2e80b service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Received event network-changed-d11df654-f231-443e-aa54-91844bb26c2f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 792.370135] env[68285]: DEBUG nova.compute.manager [req-d72ecdbc-739b-4c0a-a370-26d97eb09ef7 req-c92a16ed-dad2-45f1-9084-f4da81a2e80b service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Refreshing instance network info cache due to event network-changed-d11df654-f231-443e-aa54-91844bb26c2f. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 792.370319] env[68285]: DEBUG oslo_concurrency.lockutils [req-d72ecdbc-739b-4c0a-a370-26d97eb09ef7 req-c92a16ed-dad2-45f1-9084-f4da81a2e80b service nova] Acquiring lock "refresh_cache-9f4b2b94-ec19-4a8e-8663-ab71c417d093" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.370450] env[68285]: DEBUG oslo_concurrency.lockutils [req-d72ecdbc-739b-4c0a-a370-26d97eb09ef7 req-c92a16ed-dad2-45f1-9084-f4da81a2e80b service nova] Acquired lock "refresh_cache-9f4b2b94-ec19-4a8e-8663-ab71c417d093" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.370600] env[68285]: DEBUG nova.network.neutron [req-d72ecdbc-739b-4c0a-a370-26d97eb09ef7 req-c92a16ed-dad2-45f1-9084-f4da81a2e80b service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Refreshing network info cache for port d11df654-f231-443e-aa54-91844bb26c2f {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 792.372419] env[68285]: DEBUG oslo_concurrency.lockutils [req-6b2e7063-c18b-4a2d-a607-49617fc4a901 req-8cc512d2-3f2d-4851-aa97-241fe5a05838 service nova] Releasing lock "refresh_cache-12fad42a-1011-4563-b11f-7b141b2a1670" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.402275] env[68285]: DEBUG nova.compute.manager [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 792.477581] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "refresh_cache-d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.477826] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "refresh_cache-d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.477925] env[68285]: DEBUG nova.network.neutron [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 792.495018] env[68285]: DEBUG nova.compute.manager [req-7844441b-e72a-41bd-8660-52aca123d27a req-63ff435a-f259-44d6-af93-6380fccc8744 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Received event network-vif-plugged-4dda7e58-86f1-4d41-ad9e-0f08c3df3241 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 792.495018] env[68285]: DEBUG oslo_concurrency.lockutils [req-7844441b-e72a-41bd-8660-52aca123d27a req-63ff435a-f259-44d6-af93-6380fccc8744 service nova] Acquiring lock "87582063-50f9-4518-ad2d-915c9cd49b19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.495018] env[68285]: DEBUG oslo_concurrency.lockutils [req-7844441b-e72a-41bd-8660-52aca123d27a req-63ff435a-f259-44d6-af93-6380fccc8744 service nova] Lock "87582063-50f9-4518-ad2d-915c9cd49b19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.495018] env[68285]: DEBUG oslo_concurrency.lockutils [req-7844441b-e72a-41bd-8660-52aca123d27a req-63ff435a-f259-44d6-af93-6380fccc8744 service nova] Lock "87582063-50f9-4518-ad2d-915c9cd49b19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.495018] env[68285]: DEBUG nova.compute.manager [req-7844441b-e72a-41bd-8660-52aca123d27a req-63ff435a-f259-44d6-af93-6380fccc8744 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] No waiting events found dispatching network-vif-plugged-4dda7e58-86f1-4d41-ad9e-0f08c3df3241 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 792.495310] env[68285]: WARNING nova.compute.manager [req-7844441b-e72a-41bd-8660-52aca123d27a req-63ff435a-f259-44d6-af93-6380fccc8744 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Received unexpected event network-vif-plugged-4dda7e58-86f1-4d41-ad9e-0f08c3df3241 for instance with vm_state building and task_state spawning. 
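The "Waiting for the task" / "progress is N%" / "completed successfully" sequences above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, PowerOnVM_Task) are oslo.vmware's task polling. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession (as created at driver start-up) and a vm_ref managed-object reference; the helper name power_on_vm is illustrative, not Nova's own code:

    def power_on_vm(session, vm_ref):
        # Start the asynchronous vSphere task; oslo.vmware logs the
        # "Invoking ... with opID=oslo.vmware-..." line at this point.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        # wait_for_task() polls the task (the "Task: {...} progress is N%"
        # DEBUG lines) and returns its result once the task completes
        # successfully, raising on task error or timeout.
        return session.wait_for_task(task)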
[ 792.495310] env[68285]: DEBUG nova.compute.manager [req-7844441b-e72a-41bd-8660-52aca123d27a req-63ff435a-f259-44d6-af93-6380fccc8744 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Received event network-changed-4dda7e58-86f1-4d41-ad9e-0f08c3df3241 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 792.495310] env[68285]: DEBUG nova.compute.manager [req-7844441b-e72a-41bd-8660-52aca123d27a req-63ff435a-f259-44d6-af93-6380fccc8744 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Refreshing instance network info cache due to event network-changed-4dda7e58-86f1-4d41-ad9e-0f08c3df3241. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 792.495490] env[68285]: DEBUG oslo_concurrency.lockutils [req-7844441b-e72a-41bd-8660-52aca123d27a req-63ff435a-f259-44d6-af93-6380fccc8744 service nova] Acquiring lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.495908] env[68285]: DEBUG oslo_concurrency.lockutils [req-7844441b-e72a-41bd-8660-52aca123d27a req-63ff435a-f259-44d6-af93-6380fccc8744 service nova] Acquired lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.496171] env[68285]: DEBUG nova.network.neutron [req-7844441b-e72a-41bd-8660-52aca123d27a req-63ff435a-f259-44d6-af93-6380fccc8744 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Refreshing network info cache for port 4dda7e58-86f1-4d41-ad9e-0f08c3df3241 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 792.683027] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524c4421-1e56-f107-148f-16d8689ce4ae, 'name': SearchDatastore_Task, 'duration_secs': 0.028525} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.683305] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.683565] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 87582063-50f9-4518-ad2d-915c9cd49b19/87582063-50f9-4518-ad2d-915c9cd49b19.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 792.683930] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.684144] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 792.684363] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-079a28f5-fb16-4e56-9904-6a24fd89cc86 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.686469] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f69a458d-ce90-4e87-a1d7-56bf6d5faff5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.693642] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Waiting for the task: (returnval){ [ 792.693642] env[68285]: value = "task-2891125" [ 792.693642] env[68285]: _type = "Task" [ 792.693642] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.698372] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 792.698572] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 792.699707] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cea8882f-a9cf-487a-ace1-37628491c699 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.706088] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891125, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.708741] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Waiting for the task: (returnval){ [ 792.708741] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52af4e1e-f281-b74c-7a7b-fde19c168445" [ 792.708741] env[68285]: _type = "Task" [ 792.708741] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.716457] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52af4e1e-f281-b74c-7a7b-fde19c168445, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.737922] env[68285]: DEBUG nova.compute.manager [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 792.740658] env[68285]: DEBUG nova.scheduler.client.report [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 792.773254] env[68285]: DEBUG nova.virt.hardware [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 792.773813] env[68285]: DEBUG nova.virt.hardware [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.773862] env[68285]: DEBUG nova.virt.hardware [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 792.774134] env[68285]: DEBUG nova.virt.hardware [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.774367] env[68285]: DEBUG nova.virt.hardware [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 792.774603] env[68285]: DEBUG nova.virt.hardware [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 792.774849] env[68285]: DEBUG nova.virt.hardware [None 
req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 792.775023] env[68285]: DEBUG nova.virt.hardware [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 792.775291] env[68285]: DEBUG nova.virt.hardware [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 792.775343] env[68285]: DEBUG nova.virt.hardware [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 792.775590] env[68285]: DEBUG nova.virt.hardware [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 792.776797] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf322d8-1fec-4721-b64e-aa7857a15efa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.785546] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83a118f-97f8-4f7a-83a8-c9af02f0b266 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.922753] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.006027] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "1c42043d-f8db-4cb9-8147-48d0d32c982b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.006027] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "1c42043d-f8db-4cb9-8147-48d0d32c982b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.024023] env[68285]: DEBUG nova.network.neutron [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 793.180907] env[68285]: DEBUG nova.network.neutron [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Updating instance_info_cache with network_info: [{"id": "655ee17d-c9b8-43d9-b783-8c0a559a8300", "address": "fa:16:3e:91:6c:b0", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap655ee17d-c9", "ovs_interfaceid": "655ee17d-c9b8-43d9-b783-8c0a559a8300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.209190] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891125, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510804} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.209535] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 87582063-50f9-4518-ad2d-915c9cd49b19/87582063-50f9-4518-ad2d-915c9cd49b19.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 793.209750] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 793.212859] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d29381d9-240a-4e68-9887-038d6c17ef1e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.220904] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52af4e1e-f281-b74c-7a7b-fde19c168445, 'name': SearchDatastore_Task, 'duration_secs': 0.008238} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.222908] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Waiting for the task: (returnval){ [ 793.222908] env[68285]: value = "task-2891126" [ 793.222908] env[68285]: _type = "Task" [ 793.222908] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.223208] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d6f4649-d976-43a4-9ca5-8dad9eaca9aa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.238193] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891126, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.238616] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Waiting for the task: (returnval){ [ 793.238616] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e2493a-ebc3-ad1f-d50b-0871ba988414" [ 793.238616] env[68285]: _type = "Task" [ 793.238616] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.248908] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.557s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.248908] env[68285]: INFO nova.compute.manager [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Migrating [ 793.249057] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.249322] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired lock "compute-rpcapi-router" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.251793] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e2493a-ebc3-ad1f-d50b-0871ba988414, 'name': SearchDatastore_Task, 'duration_secs': 0.010335} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.252099] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.817s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.254021] env[68285]: INFO nova.compute.claims [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 793.256389] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.256788] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 9f4b2b94-ec19-4a8e-8663-ab71c417d093/9f4b2b94-ec19-4a8e-8663-ab71c417d093.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 793.257189] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15ee7d2d-b8c0-4708-8aa1-544f9f19268f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.268285] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Waiting for the task: (returnval){ [ 793.268285] env[68285]: value = "task-2891127" [ 793.268285] env[68285]: _type = "Task" [ 793.268285] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.276638] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891127, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.377354] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "34aeba05-804e-444c-8e58-69c7721b10b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.377638] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "34aeba05-804e-444c-8e58-69c7721b10b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.413781] env[68285]: DEBUG nova.network.neutron [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Successfully updated port: 03219bf0-d5df-4a05-8632-cb282cf3fa2e {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 793.503370] env[68285]: DEBUG nova.network.neutron [req-d72ecdbc-739b-4c0a-a370-26d97eb09ef7 req-c92a16ed-dad2-45f1-9084-f4da81a2e80b service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Updated VIF entry in instance network info cache for port d11df654-f231-443e-aa54-91844bb26c2f. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 793.503736] env[68285]: DEBUG nova.network.neutron [req-d72ecdbc-739b-4c0a-a370-26d97eb09ef7 req-c92a16ed-dad2-45f1-9084-f4da81a2e80b service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Updating instance_info_cache with network_info: [{"id": "d11df654-f231-443e-aa54-91844bb26c2f", "address": "fa:16:3e:75:eb:32", "network": {"id": "f3886ae5-9380-4597-a7bd-9f464be650ef", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1859029152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed60286b54da48c7b47b92c6058cb1d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd11df654-f2", "ovs_interfaceid": "d11df654-f231-443e-aa54-91844bb26c2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.583686] env[68285]: DEBUG nova.network.neutron [req-7844441b-e72a-41bd-8660-52aca123d27a req-63ff435a-f259-44d6-af93-6380fccc8744 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] 
Updated VIF entry in instance network info cache for port 4dda7e58-86f1-4d41-ad9e-0f08c3df3241. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 793.584156] env[68285]: DEBUG nova.network.neutron [req-7844441b-e72a-41bd-8660-52aca123d27a req-63ff435a-f259-44d6-af93-6380fccc8744 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Updating instance_info_cache with network_info: [{"id": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "address": "fa:16:3e:4e:1c:f7", "network": {"id": "e5bd99f4-35a7-4389-ba74-8ae60f642ef1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-161057880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee59d7c8bf9d4e35b0c2e1861f375a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dda7e58-86", "ovs_interfaceid": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.685326] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "refresh_cache-d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.685656] env[68285]: DEBUG nova.compute.manager [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Instance network_info: |[{"id": "655ee17d-c9b8-43d9-b783-8c0a559a8300", "address": "fa:16:3e:91:6c:b0", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap655ee17d-c9", "ovs_interfaceid": "655ee17d-c9b8-43d9-b783-8c0a559a8300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 793.686541] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:6c:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '655ee17d-c9b8-43d9-b783-8c0a559a8300', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 793.694632] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Creating folder: Project (4471597d3345443aa28b97acd91847e0). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 793.695069] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5fdd0fa3-ced5-4708-b7ad-e0c921f7898a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.709524] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Created folder: Project (4471597d3345443aa28b97acd91847e0) in parent group-v580775. [ 793.709524] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Creating folder: Instances. Parent ref: group-v580824. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 793.709524] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a61673bf-8d0a-424a-ae3a-fe9717ebfb7d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.722509] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Created folder: Instances in parent group-v580824. [ 793.722879] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 793.724014] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 793.724014] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-842bca69-4643-45bc-af2e-c8bc7466e319 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.748858] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891126, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06318} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.750185] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 793.750456] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 793.750456] env[68285]: value = "task-2891130" [ 793.750456] env[68285]: _type = "Task" [ 793.750456] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.751139] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8404e8-9398-48da-9f6a-bd11fec5e3b1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.753761] env[68285]: INFO nova.compute.rpcapi [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 793.754278] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Releasing lock "compute-rpcapi-router" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.807117] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 87582063-50f9-4518-ad2d-915c9cd49b19/87582063-50f9-4518-ad2d-915c9cd49b19.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 793.812109] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7cea81d6-e702-47da-a895-fb62b1e086b8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.827220] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891130, 'name': CreateVM_Task} progress is 15%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.833964] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891127, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456092} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.835840] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 9f4b2b94-ec19-4a8e-8663-ab71c417d093/9f4b2b94-ec19-4a8e-8663-ab71c417d093.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 793.836205] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 793.836727] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Waiting for the task: (returnval){ [ 793.836727] env[68285]: value = "task-2891131" [ 793.836727] env[68285]: _type = "Task" [ 793.836727] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.838118] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fff212a9-7b2b-48d9-96d1-70a1b3d5fcac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.847737] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891131, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.849336] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Waiting for the task: (returnval){ [ 793.849336] env[68285]: value = "task-2891132" [ 793.849336] env[68285]: _type = "Task" [ 793.849336] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.859194] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891132, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.917142] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "refresh_cache-e3b01f87-6a4c-4127-9204-2bfa5ff28f38" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.917142] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquired lock "refresh_cache-e3b01f87-6a4c-4127-9204-2bfa5ff28f38" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.917142] env[68285]: DEBUG nova.network.neutron [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 794.006948] env[68285]: DEBUG oslo_concurrency.lockutils [req-d72ecdbc-739b-4c0a-a370-26d97eb09ef7 req-c92a16ed-dad2-45f1-9084-f4da81a2e80b service nova] Releasing lock "refresh_cache-9f4b2b94-ec19-4a8e-8663-ab71c417d093" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.087569] env[68285]: DEBUG oslo_concurrency.lockutils [req-7844441b-e72a-41bd-8660-52aca123d27a req-63ff435a-f259-44d6-af93-6380fccc8744 service nova] Releasing lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.221994] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Acquiring lock "a2a7590d-c415-4955-8a25-4b1411449557" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.222317] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Lock "a2a7590d-c415-4955-8a25-4b1411449557" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.265586] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891130, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.279970] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.279970] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.279970] env[68285]: DEBUG nova.network.neutron [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 794.350993] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891131, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.366583] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891132, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066135} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.366583] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 794.366583] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9bc352-8472-40cc-8b8a-104c2a32da66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.390152] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 9f4b2b94-ec19-4a8e-8663-ab71c417d093/9f4b2b94-ec19-4a8e-8663-ab71c417d093.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 794.394162] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd88c17a-fde1-447f-bd8d-290d104acf38 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.416176] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Waiting for the task: (returnval){ [ 794.416176] env[68285]: value = "task-2891133" [ 794.416176] env[68285]: _type = "Task" [ 794.416176] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.429349] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891133, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.507820] env[68285]: DEBUG nova.network.neutron [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.782769] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891130, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.831344] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adabfc9f-3c66-48c0-9f98-03fbb7caf99e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.844491] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66210b69-ad47-4edb-99c6-c7172710f18e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.855645] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891131, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.887307] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06041e0-701e-4edb-96ff-6fd32c722825 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.902269] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6441977-49b0-40f7-8728-ae07407583a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.916479] env[68285]: DEBUG nova.compute.provider_tree [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.927409] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891133, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.162886] env[68285]: DEBUG nova.network.neutron [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Updating instance_info_cache with network_info: [{"id": "03219bf0-d5df-4a05-8632-cb282cf3fa2e", "address": "fa:16:3e:4e:b9:bd", "network": {"id": "24216eb9-67ca-4587-9dce-0239c567b87e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-463162314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0df6f9cd11e4cbea0a5d25e546ade05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03219bf0-d5", "ovs_interfaceid": "03219bf0-d5df-4a05-8632-cb282cf3fa2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.229880] env[68285]: DEBUG nova.network.neutron [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating instance_info_cache with network_info: [{"id": "84cbe58d-a7c4-4c42-9f87-9a6b62805b10", "address": "fa:16:3e:f3:99:c3", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84cbe58d-a7", "ovs_interfaceid": "84cbe58d-a7c4-4c42-9f87-9a6b62805b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.265939] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891130, 'name': CreateVM_Task, 'duration_secs': 1.373269} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.268777] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 795.269732] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.270033] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.270449] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 795.270702] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56264c3c-4f6c-4ec5-8318-8e8e539fd3ae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.283426] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 795.283426] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529796fa-0cd1-fd39-512b-b5d0cbd09e9e" [ 795.283426] env[68285]: _type = "Task" [ 795.283426] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.295146] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529796fa-0cd1-fd39-512b-b5d0cbd09e9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.350858] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891131, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.419890] env[68285]: DEBUG nova.scheduler.client.report [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 795.434191] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891133, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.604407] env[68285]: DEBUG nova.compute.manager [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Received event network-vif-plugged-03219bf0-d5df-4a05-8632-cb282cf3fa2e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 795.604679] env[68285]: DEBUG oslo_concurrency.lockutils [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] Acquiring lock "e3b01f87-6a4c-4127-9204-2bfa5ff28f38-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.604891] env[68285]: DEBUG oslo_concurrency.lockutils [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] Lock "e3b01f87-6a4c-4127-9204-2bfa5ff28f38-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.605071] env[68285]: DEBUG oslo_concurrency.lockutils [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] Lock "e3b01f87-6a4c-4127-9204-2bfa5ff28f38-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.605242] env[68285]: DEBUG nova.compute.manager [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] No waiting events found dispatching network-vif-plugged-03219bf0-d5df-4a05-8632-cb282cf3fa2e {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 795.605626] env[68285]: WARNING nova.compute.manager [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Received unexpected event network-vif-plugged-03219bf0-d5df-4a05-8632-cb282cf3fa2e for instance with vm_state building and task_state spawning. 
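The lock bookkeeping that recurs throughout this log ("Acquiring lock ... by ...", "acquired ... waited N s", '"released" ... held N s') is emitted by oslo.concurrency's lockutils, which Nova uses to serialize work such as resource-tracker claims and per-instance event handling. The following is a minimal, hypothetical sketch of that pattern (the function bodies are placeholders, not Nova code; only the lock name and instance UUIDs are taken from the log above):

```python
from oslo_concurrency import lockutils

# Decorator form: the wrapped function runs with the named in-process lock
# held; lockutils logs how long the caller waited for it and held it.
@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    print('claiming resources for %s' % instance_uuid)

# Context-manager form, matching the explicit acquire/release pairs above.
def resize_claim(instance_uuid):
    with lockutils.lock('compute_resources'):
        print('resize claim for %s' % instance_uuid)

instance_claim('81fe4854-1094-4c42-9df5-05325d961146')
resize_claim('a97df3d2-c182-46d8-95c2-61caccade285')
```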
[ 795.605626] env[68285]: DEBUG nova.compute.manager [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Received event network-changed-03219bf0-d5df-4a05-8632-cb282cf3fa2e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 795.605703] env[68285]: DEBUG nova.compute.manager [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Refreshing instance network info cache due to event network-changed-03219bf0-d5df-4a05-8632-cb282cf3fa2e. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 795.605849] env[68285]: DEBUG oslo_concurrency.lockutils [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] Acquiring lock "refresh_cache-e3b01f87-6a4c-4127-9204-2bfa5ff28f38" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.607747] env[68285]: DEBUG nova.compute.manager [req-9c3575b1-7f69-426c-a1de-8934da3956c2 req-c0d8977b-362c-45f4-85e4-35b9f959ebe4 service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Received event network-vif-plugged-655ee17d-c9b8-43d9-b783-8c0a559a8300 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 795.607933] env[68285]: DEBUG oslo_concurrency.lockutils [req-9c3575b1-7f69-426c-a1de-8934da3956c2 req-c0d8977b-362c-45f4-85e4-35b9f959ebe4 service nova] Acquiring lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.609558] env[68285]: DEBUG oslo_concurrency.lockutils [req-9c3575b1-7f69-426c-a1de-8934da3956c2 req-c0d8977b-362c-45f4-85e4-35b9f959ebe4 service nova] Lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.610856] env[68285]: DEBUG oslo_concurrency.lockutils [req-9c3575b1-7f69-426c-a1de-8934da3956c2 req-c0d8977b-362c-45f4-85e4-35b9f959ebe4 service nova] Lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.611047] env[68285]: DEBUG nova.compute.manager [req-9c3575b1-7f69-426c-a1de-8934da3956c2 req-c0d8977b-362c-45f4-85e4-35b9f959ebe4 service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] No waiting events found dispatching network-vif-plugged-655ee17d-c9b8-43d9-b783-8c0a559a8300 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 795.611233] env[68285]: WARNING nova.compute.manager [req-9c3575b1-7f69-426c-a1de-8934da3956c2 req-c0d8977b-362c-45f4-85e4-35b9f959ebe4 service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Received unexpected event network-vif-plugged-655ee17d-c9b8-43d9-b783-8c0a559a8300 for instance with vm_state building and task_state spawning. 
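The other pattern that dominates this section is oslo.vmware task polling: every vSphere "*_Task" invocation returns a task reference immediately, and the caller blocks in wait_for_task(), which produces the "Waiting for the task ... to complete" and "progress is N%" lines. A rough sketch of that flow using oslo.vmware directly is below; the connection details and the datacenter moref are placeholders, not values from this deployment, and this is not the driver's own code:

```python
from oslo_vmware import api, vim_util

# Hypothetical credentials; the real session is created once by the driver
# at startup (api_retry_count / task_poll_interval control the polling loop).
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

disk_mgr = session.vim.service_content.virtualDiskManager
datacenter = vim_util.get_moref('datacenter-2', 'Datacenter')  # placeholder id

# Start the asynchronous operation; the *_Task call returns right away with a
# task moref (compare the ExtendVirtualDisk_Task records above, 1048576 KB).
task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                          name='[datastore1] example/example.vmdk',
                          datacenter=datacenter,
                          newCapacityKb=1048576,
                          eagerZero=False)

# wait_for_task() polls TaskInfo at task_poll_interval, logging progress, and
# returns the completed task info (or raises if the task ends in error).
task_info = session.wait_for_task(task)
```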
[ 795.611397] env[68285]: DEBUG nova.compute.manager [req-9c3575b1-7f69-426c-a1de-8934da3956c2 req-c0d8977b-362c-45f4-85e4-35b9f959ebe4 service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Received event network-changed-655ee17d-c9b8-43d9-b783-8c0a559a8300 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 795.611549] env[68285]: DEBUG nova.compute.manager [req-9c3575b1-7f69-426c-a1de-8934da3956c2 req-c0d8977b-362c-45f4-85e4-35b9f959ebe4 service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Refreshing instance network info cache due to event network-changed-655ee17d-c9b8-43d9-b783-8c0a559a8300. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 795.611727] env[68285]: DEBUG oslo_concurrency.lockutils [req-9c3575b1-7f69-426c-a1de-8934da3956c2 req-c0d8977b-362c-45f4-85e4-35b9f959ebe4 service nova] Acquiring lock "refresh_cache-d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.611859] env[68285]: DEBUG oslo_concurrency.lockutils [req-9c3575b1-7f69-426c-a1de-8934da3956c2 req-c0d8977b-362c-45f4-85e4-35b9f959ebe4 service nova] Acquired lock "refresh_cache-d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.612014] env[68285]: DEBUG nova.network.neutron [req-9c3575b1-7f69-426c-a1de-8934da3956c2 req-c0d8977b-362c-45f4-85e4-35b9f959ebe4 service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Refreshing network info cache for port 655ee17d-c9b8-43d9-b783-8c0a559a8300 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 795.666096] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Releasing lock "refresh_cache-e3b01f87-6a4c-4127-9204-2bfa5ff28f38" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.669018] env[68285]: DEBUG nova.compute.manager [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Instance network_info: |[{"id": "03219bf0-d5df-4a05-8632-cb282cf3fa2e", "address": "fa:16:3e:4e:b9:bd", "network": {"id": "24216eb9-67ca-4587-9dce-0239c567b87e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-463162314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0df6f9cd11e4cbea0a5d25e546ade05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03219bf0-d5", "ovs_interfaceid": "03219bf0-d5df-4a05-8632-cb282cf3fa2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 795.669018] env[68285]: DEBUG oslo_concurrency.lockutils [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] Acquired lock "refresh_cache-e3b01f87-6a4c-4127-9204-2bfa5ff28f38" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.669299] env[68285]: DEBUG nova.network.neutron [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Refreshing network info cache for port 03219bf0-d5df-4a05-8632-cb282cf3fa2e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 795.669299] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:b9:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '12d8eedb-97cb-4d3b-b364-42d7fd8b3c85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '03219bf0-d5df-4a05-8632-cb282cf3fa2e', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 795.675926] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Creating folder: Project (f0df6f9cd11e4cbea0a5d25e546ade05). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 795.676772] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ed50e3c-e644-4bf1-8032-1f9eff199ae4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.691769] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Created folder: Project (f0df6f9cd11e4cbea0a5d25e546ade05) in parent group-v580775. [ 795.691769] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Creating folder: Instances. Parent ref: group-v580827. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 795.693569] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d508e8c4-8f05-4b22-b065-c6b39d2020e2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.701219] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Created folder: Instances in parent group-v580827. [ 795.701469] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 795.701656] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 795.701856] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ff99db4-fe6a-4631-a233-a3b9f3ff3b6d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.724520] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 795.724520] env[68285]: value = "task-2891136" [ 795.724520] env[68285]: _type = "Task" [ 795.724520] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.732398] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891136, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.736250] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Releasing lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.798659] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529796fa-0cd1-fd39-512b-b5d0cbd09e9e, 'name': SearchDatastore_Task, 'duration_secs': 0.027708} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.799800] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.799800] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 795.800453] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.800453] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.800453] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.800771] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0179223-e457-4d45-a43f-896e25bd1051 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.811986] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.811986] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 795.811986] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06689387-cd7e-48ca-bca0-894b1eee3333 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.818887] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 795.818887] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52cedaab-0879-7ccb-f0da-e0e694a122de" [ 795.818887] env[68285]: _type = "Task" [ 795.818887] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.826983] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52cedaab-0879-7ccb-f0da-e0e694a122de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.851281] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891131, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.928739] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.677s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.929438] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 795.931916] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.908s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.936228] env[68285]: INFO nova.compute.claims [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 795.949114] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891133, 'name': ReconfigVM_Task, 'duration_secs': 1.161302} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.949114] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 9f4b2b94-ec19-4a8e-8663-ab71c417d093/9f4b2b94-ec19-4a8e-8663-ab71c417d093.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 795.949737] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a127e075-1fb7-4ac1-a1f1-8f7647f8e7a5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.956221] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Waiting for the task: (returnval){ [ 795.956221] env[68285]: value = "task-2891137" [ 795.956221] env[68285]: _type = "Task" [ 795.956221] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.965746] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891137, 'name': Rename_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.217914] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "1b9dd0e2-781f-43d7-a66e-e718a0972c78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.218779] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "1b9dd0e2-781f-43d7-a66e-e718a0972c78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.236439] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891136, 'name': CreateVM_Task, 'duration_secs': 0.475881} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.236622] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 796.237887] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.237887] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.237887] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 796.238192] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97008359-1a69-44c4-9c1d-cf0906528207 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.248432] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 796.248432] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5231ac72-39a4-d81c-6295-9cf0c4937ae7" [ 796.248432] env[68285]: _type = "Task" [ 796.248432] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.256630] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5231ac72-39a4-d81c-6295-9cf0c4937ae7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.334021] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52cedaab-0879-7ccb-f0da-e0e694a122de, 'name': SearchDatastore_Task, 'duration_secs': 0.012036} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.334021] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-675376fa-81f0-4e6f-ade2-352a0bf359f8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.339394] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 796.339394] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52a51e9d-9089-f8bb-5397-f4cab9ae4633" [ 796.339394] env[68285]: _type = "Task" [ 796.339394] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.355417] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a51e9d-9089-f8bb-5397-f4cab9ae4633, 'name': SearchDatastore_Task, 'duration_secs': 0.011478} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.360955] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.361412] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] d0f6ab86-e18d-42ac-bcf3-94eafb1939ff/d0f6ab86-e18d-42ac-bcf3-94eafb1939ff.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 796.361866] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891131, 'name': ReconfigVM_Task, 'duration_secs': 2.085813} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.362191] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5c3a49f-8cfe-4f9e-b09a-1718bec28bfe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.364793] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 87582063-50f9-4518-ad2d-915c9cd49b19/87582063-50f9-4518-ad2d-915c9cd49b19.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 796.365552] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-324bf486-e44c-4e06-bef8-02822a81e660 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.372657] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 796.372657] env[68285]: value = "task-2891138" [ 796.372657] env[68285]: _type = "Task" [ 796.372657] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.374412] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Waiting for the task: (returnval){ [ 796.374412] env[68285]: value = "task-2891139" [ 796.374412] env[68285]: _type = "Task" [ 796.374412] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.387100] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891139, 'name': Rename_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.391456] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891138, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.442298] env[68285]: DEBUG nova.compute.utils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 796.455554] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 796.455554] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 796.474402] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891137, 'name': Rename_Task, 'duration_secs': 0.154264} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.474812] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 796.475187] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5dd2bc51-f611-4940-837e-5dd2149cca52 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.483088] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Waiting for the task: (returnval){ [ 796.483088] env[68285]: value = "task-2891140" [ 796.483088] env[68285]: _type = "Task" [ 796.483088] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.494298] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891140, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.508558] env[68285]: DEBUG nova.policy [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be02bc9800624e9c8e076b1df97b9bd1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bc85bbdab8b44b395f0ebbf88f9df03', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 796.717259] env[68285]: DEBUG nova.network.neutron [req-9c3575b1-7f69-426c-a1de-8934da3956c2 req-c0d8977b-362c-45f4-85e4-35b9f959ebe4 service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Updated VIF entry in instance network info cache for port 655ee17d-c9b8-43d9-b783-8c0a559a8300. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 796.718700] env[68285]: DEBUG nova.network.neutron [req-9c3575b1-7f69-426c-a1de-8934da3956c2 req-c0d8977b-362c-45f4-85e4-35b9f959ebe4 service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Updating instance_info_cache with network_info: [{"id": "655ee17d-c9b8-43d9-b783-8c0a559a8300", "address": "fa:16:3e:91:6c:b0", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap655ee17d-c9", "ovs_interfaceid": "655ee17d-c9b8-43d9-b783-8c0a559a8300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.767007] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5231ac72-39a4-d81c-6295-9cf0c4937ae7, 'name': SearchDatastore_Task, 'duration_secs': 0.023466} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.767450] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.767738] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 796.767980] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.768145] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.768321] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 796.768948] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0dde1919-a508-44d1-a41b-9db8a811074e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.783203] env[68285]: DEBUG nova.network.neutron [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Updated VIF entry in instance network info cache for port 03219bf0-d5df-4a05-8632-cb282cf3fa2e. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 796.784022] env[68285]: DEBUG nova.network.neutron [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Updating instance_info_cache with network_info: [{"id": "03219bf0-d5df-4a05-8632-cb282cf3fa2e", "address": "fa:16:3e:4e:b9:bd", "network": {"id": "24216eb9-67ca-4587-9dce-0239c567b87e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-463162314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0df6f9cd11e4cbea0a5d25e546ade05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03219bf0-d5", "ovs_interfaceid": "03219bf0-d5df-4a05-8632-cb282cf3fa2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.789788] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 796.790042] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 796.791136] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fda1b8f-ff1e-4807-957d-46ebd1c4734a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.802358] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 796.802358] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5270cf72-98f3-39d1-422b-d70c6e9d6eb2" [ 796.802358] env[68285]: _type = "Task" [ 796.802358] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.820065] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5270cf72-98f3-39d1-422b-d70c6e9d6eb2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.868008] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Successfully created port: 60f03e16-4a3a-44b1-b442-db8e844f18a3 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 796.887790] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891138, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.891269] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891139, 'name': Rename_Task, 'duration_secs': 0.200254} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.891658] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 796.891933] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b259a90e-5c58-479e-a563-f04ebd99f90b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.899119] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Waiting for the task: (returnval){ [ 796.899119] env[68285]: value = "task-2891141" [ 796.899119] env[68285]: _type = "Task" [ 796.899119] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.908299] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891141, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.945186] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 797.001257] env[68285]: DEBUG oslo_vmware.api [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891140, 'name': PowerOnVM_Task, 'duration_secs': 0.500226} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.002376] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 797.002731] env[68285]: INFO nova.compute.manager [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Took 9.52 seconds to spawn the instance on the hypervisor. [ 797.003145] env[68285]: DEBUG nova.compute.manager [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 797.004336] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6765aad5-507a-469d-bc22-03935227af33 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.225220] env[68285]: DEBUG oslo_concurrency.lockutils [req-9c3575b1-7f69-426c-a1de-8934da3956c2 req-c0d8977b-362c-45f4-85e4-35b9f959ebe4 service nova] Releasing lock "refresh_cache-d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.262344] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d405e395-f18a-436f-89d4-e3879d6c7e6a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.286758] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating instance 'a97df3d2-c182-46d8-95c2-61caccade285' progress to 0 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 797.295021] env[68285]: DEBUG oslo_concurrency.lockutils [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] Releasing lock "refresh_cache-e3b01f87-6a4c-4127-9204-2bfa5ff28f38" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.295021] env[68285]: DEBUG nova.compute.manager [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Received event network-changed-ccc388b0-7423-4892-ac70-e4d86b1a0f17 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 797.295021] env[68285]: DEBUG nova.compute.manager [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Refreshing instance network info cache due to event network-changed-ccc388b0-7423-4892-ac70-e4d86b1a0f17. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 797.295021] env[68285]: DEBUG oslo_concurrency.lockutils [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] Acquiring lock "refresh_cache-12fad42a-1011-4563-b11f-7b141b2a1670" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.295021] env[68285]: DEBUG oslo_concurrency.lockutils [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] Acquired lock "refresh_cache-12fad42a-1011-4563-b11f-7b141b2a1670" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.295682] env[68285]: DEBUG nova.network.neutron [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Refreshing network info cache for port ccc388b0-7423-4892-ac70-e4d86b1a0f17 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 797.323356] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5270cf72-98f3-39d1-422b-d70c6e9d6eb2, 'name': SearchDatastore_Task, 'duration_secs': 0.063978} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.323824] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-353c8f2f-e87c-47c5-b4bd-21900d0fafd6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.330541] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 797.330541] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529bb334-b221-9940-4076-80c1c767055e" [ 797.330541] env[68285]: _type = "Task" [ 797.330541] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.343625] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529bb334-b221-9940-4076-80c1c767055e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.390711] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891138, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.614489} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.393880] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] d0f6ab86-e18d-42ac-bcf3-94eafb1939ff/d0f6ab86-e18d-42ac-bcf3-94eafb1939ff.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 797.393880] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 797.394233] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d91024ff-c3c4-4065-8543-8f474bb57fd4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.400794] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 797.400794] env[68285]: value = "task-2891142" [ 797.400794] env[68285]: _type = "Task" [ 797.400794] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.416097] env[68285]: DEBUG oslo_vmware.api [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891141, 'name': PowerOnVM_Task, 'duration_secs': 0.487224} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.419511] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 797.419651] env[68285]: INFO nova.compute.manager [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Took 12.53 seconds to spawn the instance on the hypervisor. [ 797.420469] env[68285]: DEBUG nova.compute.manager [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 797.420469] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891142, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.420874] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f1beb4-b966-4790-9c41-485ab3240a96 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.532200] env[68285]: INFO nova.compute.manager [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Took 25.99 seconds to build instance. [ 797.537197] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35a2f84-91e5-41cf-bfe7-b3132b68f486 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.543984] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8145fd-de83-4ef9-ab08-dcb8bb689641 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.578465] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63663514-cd0a-4b73-b1c5-4eb4e5f63513 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.586777] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe80345-4f74-4622-94f7-e269d8ad6077 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.602910] env[68285]: DEBUG nova.compute.provider_tree [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.798643] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 797.801361] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eda8ec3b-e48d-4ebb-9c43-b219c9122c72 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.811859] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 797.811859] env[68285]: value = "task-2891143" [ 797.811859] env[68285]: _type = "Task" [ 797.811859] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.841556] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529bb334-b221-9940-4076-80c1c767055e, 'name': SearchDatastore_Task, 'duration_secs': 0.031262} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.841867] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.842224] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] e3b01f87-6a4c-4127-9204-2bfa5ff28f38/e3b01f87-6a4c-4127-9204-2bfa5ff28f38.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 797.842407] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-994c94c3-84ac-4800-b182-42f56710bb4c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.848999] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 797.848999] env[68285]: value = "task-2891144" [ 797.848999] env[68285]: _type = "Task" [ 797.848999] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.857793] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891144, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.913803] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891142, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.155873} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.914104] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 797.914921] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb6f480-9a0d-4e25-afd9-2622d2727a9d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.939034] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] d0f6ab86-e18d-42ac-bcf3-94eafb1939ff/d0f6ab86-e18d-42ac-bcf3-94eafb1939ff.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 797.948029] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d598991e-2e01-4fc8-9495-ed81ecab8062 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.962841] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 797.965230] env[68285]: INFO nova.compute.manager [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Took 27.54 seconds to build instance. [ 797.973097] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 797.973097] env[68285]: value = "task-2891145" [ 797.973097] env[68285]: _type = "Task" [ 797.973097] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.984083] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891145, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.997950] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 797.998329] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 797.998405] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 797.998734] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 797.998976] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 797.999234] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 797.999553] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 797.999776] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 798.001131] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 798.001131] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 798.001131] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 798.001612] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e38583-2df4-4afe-957d-eb48912e012e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.010147] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3542a5-abdc-4603-9ba1-23ac906c6eba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.039977] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4e8c1d86-ca16-4ac4-8d92-93c02472c5b6 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Lock "9f4b2b94-ec19-4a8e-8663-ab71c417d093" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.306s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.106276] env[68285]: DEBUG nova.scheduler.client.report [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 798.322555] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891143, 'name': PowerOffVM_Task, 'duration_secs': 0.448846} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.322555] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 798.322555] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating instance 'a97df3d2-c182-46d8-95c2-61caccade285' progress to 17 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 798.361147] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891144, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.470129] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6bd45304-fb28-4bd1-994c-e5e43030c1db tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Lock "87582063-50f9-4518-ad2d-915c9cd49b19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.872s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.484561] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891145, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.542794] env[68285]: DEBUG nova.compute.manager [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 798.594348] env[68285]: DEBUG nova.network.neutron [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Updated VIF entry in instance network info cache for port ccc388b0-7423-4892-ac70-e4d86b1a0f17. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 798.594718] env[68285]: DEBUG nova.network.neutron [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Updating instance_info_cache with network_info: [{"id": "ccc388b0-7423-4892-ac70-e4d86b1a0f17", "address": "fa:16:3e:55:5b:72", "network": {"id": "f7a43f6f-f31d-4b2c-a140-2b4d0375ff45", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1101832130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53c6c9c73f07454fbe69beeee428a15a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccc388b0-74", "ovs_interfaceid": "ccc388b0-7423-4892-ac70-e4d86b1a0f17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.614422] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.682s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.615145] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 798.619138] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.307s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.619390] env[68285]: DEBUG nova.objects.instance [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Lazy-loading 'resources' on Instance uuid f26a5b02-c71f-4f04-a8b2-4e284a6e37a6 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.674289] env[68285]: DEBUG nova.compute.manager [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 798.675260] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330620ee-64cd-4c05-8e9b-366d97af7b7d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.688847] env[68285]: DEBUG nova.compute.manager [req-e084724d-e4b9-4c09-b12e-3194315ea07f req-de0aeb4e-687c-4027-8cb7-ac8f2c624d28 service nova] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Received event network-changed-d13fdc9f-ab41-435a-8bd4-080dbc090832 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 798.688971] env[68285]: DEBUG nova.compute.manager [req-e084724d-e4b9-4c09-b12e-3194315ea07f req-de0aeb4e-687c-4027-8cb7-ac8f2c624d28 service nova] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Refreshing instance network info cache due to event network-changed-d13fdc9f-ab41-435a-8bd4-080dbc090832. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 798.690025] env[68285]: DEBUG oslo_concurrency.lockutils [req-e084724d-e4b9-4c09-b12e-3194315ea07f req-de0aeb4e-687c-4027-8cb7-ac8f2c624d28 service nova] Acquiring lock "refresh_cache-5b58896c-cb07-48c8-ace0-385486a3e19d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.690025] env[68285]: DEBUG oslo_concurrency.lockutils [req-e084724d-e4b9-4c09-b12e-3194315ea07f req-de0aeb4e-687c-4027-8cb7-ac8f2c624d28 service nova] Acquired lock "refresh_cache-5b58896c-cb07-48c8-ace0-385486a3e19d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.690025] env[68285]: DEBUG nova.network.neutron [req-e084724d-e4b9-4c09-b12e-3194315ea07f req-de0aeb4e-687c-4027-8cb7-ac8f2c624d28 service nova] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Refreshing network info cache for port d13fdc9f-ab41-435a-8bd4-080dbc090832 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 798.704547] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Successfully updated port: 60f03e16-4a3a-44b1-b442-db8e844f18a3 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 798.829036] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 798.829442] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 798.829442] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 798.829534] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 798.829647] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image pref 0:0:0 {{(pid=68285) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 798.829870] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 798.830197] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 798.830448] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 798.830720] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 798.830940] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 798.831173] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 798.836359] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6606c0d5-561c-40ac-b406-19c9756ccdd4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.855844] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 798.855844] env[68285]: value = "task-2891146" [ 798.855844] env[68285]: _type = "Task" [ 798.855844] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.864261] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891144, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.865232} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.865146] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] e3b01f87-6a4c-4127-9204-2bfa5ff28f38/e3b01f87-6a4c-4127-9204-2bfa5ff28f38.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 798.865302] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 798.865587] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-525b4d84-bf12-4e1a-8590-4f1de8ce6254 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.871030] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891146, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.875640] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 798.875640] env[68285]: value = "task-2891147" [ 798.875640] env[68285]: _type = "Task" [ 798.875640] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.885532] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891147, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.974035] env[68285]: DEBUG nova.compute.manager [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 798.989456] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891145, 'name': ReconfigVM_Task, 'duration_secs': 0.891464} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.989811] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Reconfigured VM instance instance-00000011 to attach disk [datastore2] d0f6ab86-e18d-42ac-bcf3-94eafb1939ff/d0f6ab86-e18d-42ac-bcf3-94eafb1939ff.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 798.991204] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d531dec-fe6e-4003-9b0f-7fab65221214 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.997361] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 798.997361] env[68285]: value = "task-2891148" [ 798.997361] env[68285]: _type = "Task" [ 798.997361] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.006022] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891148, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.066755] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.100393] env[68285]: DEBUG oslo_concurrency.lockutils [req-bc580618-3cba-4184-925e-56d9cde906d8 req-60201d25-e50e-4fb4-bf39-d4b379ada82c service nova] Releasing lock "refresh_cache-12fad42a-1011-4563-b11f-7b141b2a1670" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.120884] env[68285]: DEBUG nova.compute.utils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 799.122300] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 799.122437] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 799.169857] env[68285]: DEBUG nova.policy [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be02bc9800624e9c8e076b1df97b9bd1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bc85bbdab8b44b395f0ebbf88f9df03', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 799.192476] env[68285]: INFO nova.compute.manager [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] instance snapshotting [ 799.198637] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c250df-d056-42fd-a88f-5c91ab3fb393 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.231019] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "refresh_cache-81fe4854-1094-4c42-9df5-05325d961146" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.231019] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquired lock "refresh_cache-81fe4854-1094-4c42-9df5-05325d961146" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.231019] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 799.238120] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee84613-afb6-452e-843b-96d778a3a480 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.375216] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891146, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.385323] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891147, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093836} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.386223] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 799.386417] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ed43f4-1ae9-42d1-96d6-988ff5f324e6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.410538] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] e3b01f87-6a4c-4127-9204-2bfa5ff28f38/e3b01f87-6a4c-4127-9204-2bfa5ff28f38.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 799.410698] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e63d3a6-b625-4f4e-ad4a-a662ab06cabf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.436617] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 799.436617] env[68285]: value = "task-2891149" [ 799.436617] env[68285]: _type = "Task" [ 799.436617] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.447448] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891149, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.457923] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Successfully created port: 47d51556-cb83-406c-ad00-883c1493aa5f {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.507665] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.511666] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891148, 'name': Rename_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.627598] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 799.648956] env[68285]: DEBUG nova.network.neutron [req-e084724d-e4b9-4c09-b12e-3194315ea07f req-de0aeb4e-687c-4027-8cb7-ac8f2c624d28 service nova] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Updated VIF entry in instance network info cache for port d13fdc9f-ab41-435a-8bd4-080dbc090832. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 799.649363] env[68285]: DEBUG nova.network.neutron [req-e084724d-e4b9-4c09-b12e-3194315ea07f req-de0aeb4e-687c-4027-8cb7-ac8f2c624d28 service nova] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Updating instance_info_cache with network_info: [{"id": "d13fdc9f-ab41-435a-8bd4-080dbc090832", "address": "fa:16:3e:28:e0:f0", "network": {"id": "c4888d55-ced4-405f-af85-64f86b5a1859", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1192509281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bef2539ec26f4cb5810e41ed80f04860", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd13fdc9f-ab", "ovs_interfaceid": "d13fdc9f-ab41-435a-8bd4-080dbc090832", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.676261] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af726d1-225c-4447-99a4-00ec55a97e54 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.686073] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7709892-9408-4e6f-afb6-7d27e6d4dfc0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.718917] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6093caa-7ea0-4f3e-befd-886442aaf679 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.727435] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380bf604-dbd2-436a-ba2d-7fc849dd15d2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.744138] env[68285]: DEBUG nova.compute.provider_tree [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.754469] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 799.755103] 
env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-caa8fd91-b352-4595-98cb-5de5ccf4972c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.764473] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 799.764473] env[68285]: value = "task-2891150" [ 799.764473] env[68285]: _type = "Task" [ 799.764473] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.773615] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891150, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.792426] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 799.869173] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891146, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.948295] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891149, 'name': ReconfigVM_Task, 'duration_secs': 0.496582} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.948658] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Reconfigured VM instance instance-00000012 to attach disk [datastore2] e3b01f87-6a4c-4127-9204-2bfa5ff28f38/e3b01f87-6a4c-4127-9204-2bfa5ff28f38.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 799.949400] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-703f9409-7382-4ff6-96e7-a4299a3994e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.956120] env[68285]: DEBUG nova.compute.manager [req-1ec5eeb3-0eb2-43a7-92d5-c1d10027fece req-77c2b784-479b-4fb8-84c5-3597ec41e9d5 service nova] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Received event network-vif-plugged-60f03e16-4a3a-44b1-b442-db8e844f18a3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 799.956423] env[68285]: DEBUG oslo_concurrency.lockutils [req-1ec5eeb3-0eb2-43a7-92d5-c1d10027fece req-77c2b784-479b-4fb8-84c5-3597ec41e9d5 service nova] Acquiring lock "81fe4854-1094-4c42-9df5-05325d961146-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.956646] env[68285]: DEBUG oslo_concurrency.lockutils [req-1ec5eeb3-0eb2-43a7-92d5-c1d10027fece req-77c2b784-479b-4fb8-84c5-3597ec41e9d5 service nova] Lock "81fe4854-1094-4c42-9df5-05325d961146-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.956916] env[68285]: DEBUG oslo_concurrency.lockutils [req-1ec5eeb3-0eb2-43a7-92d5-c1d10027fece req-77c2b784-479b-4fb8-84c5-3597ec41e9d5 service nova] Lock "81fe4854-1094-4c42-9df5-05325d961146-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.957112] env[68285]: DEBUG nova.compute.manager [req-1ec5eeb3-0eb2-43a7-92d5-c1d10027fece req-77c2b784-479b-4fb8-84c5-3597ec41e9d5 service nova] [instance: 81fe4854-1094-4c42-9df5-05325d961146] No waiting events found dispatching network-vif-plugged-60f03e16-4a3a-44b1-b442-db8e844f18a3 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 799.957182] env[68285]: WARNING nova.compute.manager [req-1ec5eeb3-0eb2-43a7-92d5-c1d10027fece req-77c2b784-479b-4fb8-84c5-3597ec41e9d5 service nova] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Received unexpected event network-vif-plugged-60f03e16-4a3a-44b1-b442-db8e844f18a3 for instance with vm_state building and task_state spawning. [ 799.959153] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 799.959153] env[68285]: value = "task-2891151" [ 799.959153] env[68285]: _type = "Task" [ 799.959153] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.968547] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891151, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.007787] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891148, 'name': Rename_Task, 'duration_secs': 0.963201} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.008141] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 800.008373] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c45d2d46-6149-4399-8af3-56df678e0fa0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.015242] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 800.015242] env[68285]: value = "task-2891152" [ 800.015242] env[68285]: _type = "Task" [ 800.015242] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.023561] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.023829] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.029641] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891152, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.038221] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Updating instance_info_cache with network_info: [{"id": "60f03e16-4a3a-44b1-b442-db8e844f18a3", "address": "fa:16:3e:c1:6b:3c", "network": {"id": "35d696b3-58ec-478c-a919-f64f277bb27a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-475255792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc85bbdab8b44b395f0ebbf88f9df03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60f03e16-4a", "ovs_interfaceid": "60f03e16-4a3a-44b1-b442-db8e844f18a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.152125] env[68285]: DEBUG oslo_concurrency.lockutils [req-e084724d-e4b9-4c09-b12e-3194315ea07f req-de0aeb4e-687c-4027-8cb7-ac8f2c624d28 service nova] Releasing lock "refresh_cache-5b58896c-cb07-48c8-ace0-385486a3e19d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.247385] env[68285]: DEBUG nova.scheduler.client.report [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 800.275595] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891150, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.368328] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891146, 'name': ReconfigVM_Task, 'duration_secs': 1.286066} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.368642] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating instance 'a97df3d2-c182-46d8-95c2-61caccade285' progress to 33 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 800.468642] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891151, 'name': Rename_Task, 'duration_secs': 0.175598} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.469424] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 800.469424] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d013f9fd-3c2c-4af8-a4ba-02e9fd02aefb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.475288] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 800.475288] env[68285]: value = "task-2891153" [ 800.475288] env[68285]: _type = "Task" [ 800.475288] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.483504] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891153, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.527048] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891152, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.541269] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Releasing lock "refresh_cache-81fe4854-1094-4c42-9df5-05325d961146" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.541668] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Instance network_info: |[{"id": "60f03e16-4a3a-44b1-b442-db8e844f18a3", "address": "fa:16:3e:c1:6b:3c", "network": {"id": "35d696b3-58ec-478c-a919-f64f277bb27a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-475255792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc85bbdab8b44b395f0ebbf88f9df03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60f03e16-4a", "ovs_interfaceid": "60f03e16-4a3a-44b1-b442-db8e844f18a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 800.542097] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:6b:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e39ca24f-7890-4cdf-8dab-ecab218bb063', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '60f03e16-4a3a-44b1-b442-db8e844f18a3', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 800.549728] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Creating folder: Project (4bc85bbdab8b44b395f0ebbf88f9df03). Parent ref: group-v580775. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 800.549997] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5cb0c7f1-af21-4117-b62c-f92ce611e238 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.560057] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Created folder: Project (4bc85bbdab8b44b395f0ebbf88f9df03) in parent group-v580775. [ 800.560288] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Creating folder: Instances. Parent ref: group-v580830. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 800.560562] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-144896ab-1b4c-4e02-9f8e-bc7dd561925e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.569657] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Created folder: Instances in parent group-v580830. [ 800.570474] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 800.570474] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 800.570474] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c619d989-9570-4d9e-998f-f65f36c88963 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.597048] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 800.597048] env[68285]: value = "task-2891156" [ 800.597048] env[68285]: _type = "Task" [ 800.597048] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.607090] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891156, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.638822] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 800.668796] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 800.669074] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.671075] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 800.671075] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.671075] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 800.671075] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 800.671075] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 800.671462] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 800.671462] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 800.671462] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 800.671462] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 800.671699] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70e22a7-07be-4027-bb28-11896407ff8f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.679875] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5657902e-bc2c-45ba-afa2-d3537f60c34a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.754742] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.136s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.757754] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.990s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.758311] env[68285]: DEBUG nova.objects.instance [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Lazy-loading 'resources' on Instance uuid 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 800.776265] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891150, 'name': CreateSnapshot_Task, 'duration_secs': 0.874301} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.776799] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 800.777692] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02cb846d-1bf8-485b-b115-c3486f598655 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.792132] env[68285]: INFO nova.scheduler.client.report [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Deleted allocations for instance f26a5b02-c71f-4f04-a8b2-4e284a6e37a6 [ 800.875318] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 800.876321] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.876321] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 800.876321] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.876321] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 800.876321] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 800.876784] env[68285]: DEBUG nova.virt.hardware [None 
req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 800.876784] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 800.876932] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 800.877127] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 800.877310] env[68285]: DEBUG nova.virt.hardware [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 800.885018] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Reconfiguring VM instance instance-00000007 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 800.885018] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a3bc246-7042-443d-8e46-dbfda91d8f98 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.908559] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 800.908559] env[68285]: value = "task-2891157" [ 800.908559] env[68285]: _type = "Task" [ 800.908559] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.917754] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891157, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.991024] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891153, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.027568] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Successfully updated port: 47d51556-cb83-406c-ad00-883c1493aa5f {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 801.039665] env[68285]: DEBUG oslo_vmware.api [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891152, 'name': PowerOnVM_Task, 'duration_secs': 0.738802} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.039665] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 801.039665] env[68285]: INFO nova.compute.manager [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Took 10.88 seconds to spawn the instance on the hypervisor. [ 801.039665] env[68285]: DEBUG nova.compute.manager [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 801.039869] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2333ffc-89d5-4ea1-8e6b-4d657cc0cced {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.107940] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891156, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.301547] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 801.304276] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-20dbbd2a-af96-4f78-b0f5-a23cd5fc23fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.307443] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901784c7-adfe-4d94-8627-9c89404a2e4a tempest-ServerDiagnosticsTest-1060127533 tempest-ServerDiagnosticsTest-1060127533-project-member] Lock "f26a5b02-c71f-4f04-a8b2-4e284a6e37a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.346s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.313470] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 801.313470] env[68285]: value = "task-2891158" [ 801.313470] env[68285]: _type = "Task" [ 801.313470] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.325540] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891158, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.418596] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891157, 'name': ReconfigVM_Task, 'duration_secs': 0.508709} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.421694] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Reconfigured VM instance instance-00000007 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 801.422722] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c156da89-f13e-4ce3-88d2-d538e082526f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.447882] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] a97df3d2-c182-46d8-95c2-61caccade285/a97df3d2-c182-46d8-95c2-61caccade285.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 801.450901] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86274d0b-6ca3-4cec-817d-92ea89fd4ca0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.485283] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 801.485283] env[68285]: value = "task-2891159" [ 801.485283] env[68285]: _type = "Task" [ 801.485283] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.492072] env[68285]: DEBUG oslo_vmware.api [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891153, 'name': PowerOnVM_Task, 'duration_secs': 0.788216} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.492739] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 801.492956] env[68285]: INFO nova.compute.manager [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Took 8.75 seconds to spawn the instance on the hypervisor. 
[ 801.493149] env[68285]: DEBUG nova.compute.manager [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 801.493919] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d843cd-c6f4-43b3-be40-1227027d7113 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.502863] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891159, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.531080] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "refresh_cache-ee45231a-80f2-49b9-8bc7-03a0c920a668" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.532172] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquired lock "refresh_cache-ee45231a-80f2-49b9-8bc7-03a0c920a668" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.532172] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 801.565158] env[68285]: INFO nova.compute.manager [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Took 29.91 seconds to build instance. [ 801.593759] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "437a18da-8fe4-478e-82a0-3b1a9da47df8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.594781] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "437a18da-8fe4-478e-82a0-3b1a9da47df8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.612563] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891156, 'name': CreateVM_Task, 'duration_secs': 0.726571} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.612751] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 801.613424] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.613590] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.613940] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 801.614926] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7113c350-23fe-406a-9a80-fea95cc9ea12 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.625336] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 801.625336] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521cdaf4-4f20-d830-1f43-332c9f937ede" [ 801.625336] env[68285]: _type = "Task" [ 801.625336] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.632651] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521cdaf4-4f20-d830-1f43-332c9f937ede, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.826154] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891158, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.861883] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05cba72-8c3c-44fc-b623-3d80eb8ac961 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.870094] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2bcd0db-2ed8-4212-bcfe-e6a6760f64ad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.909187] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57aa639f-ac6d-4089-a4d7-99c000b01908 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.917291] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18475a2c-fd9f-494c-9be3-83d2f9c62733 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.935409] env[68285]: DEBUG nova.compute.provider_tree [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.996760] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891159, 'name': ReconfigVM_Task, 'duration_secs': 0.434459} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.997209] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Reconfigured VM instance instance-00000007 to attach disk [datastore2] a97df3d2-c182-46d8-95c2-61caccade285/a97df3d2-c182-46d8-95c2-61caccade285.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 801.997503] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating instance 'a97df3d2-c182-46d8-95c2-61caccade285' progress to 50 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 802.020446] env[68285]: INFO nova.compute.manager [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Took 27.64 seconds to build instance. [ 802.066539] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.068653] env[68285]: DEBUG oslo_concurrency.lockutils [None req-807db115-5667-4d2d-86ce-eb840dfcd320 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.794s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.140021] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521cdaf4-4f20-d830-1f43-332c9f937ede, 'name': SearchDatastore_Task, 'duration_secs': 0.011625} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.140021] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.140021] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 802.140021] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.140272] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.140272] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 802.140272] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1903197-5039-49d9-b0d7-8441b81e1688 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.149148] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 
tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 802.149621] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 802.150353] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc72edca-db5e-467e-b43b-19a0388c3731 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.156010] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 802.156010] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52437efd-5e5f-ffbc-8bdf-8dd256aab541" [ 802.156010] env[68285]: _type = "Task" [ 802.156010] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.167702] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52437efd-5e5f-ffbc-8bdf-8dd256aab541, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.222092] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Updating instance_info_cache with network_info: [{"id": "47d51556-cb83-406c-ad00-883c1493aa5f", "address": "fa:16:3e:25:46:44", "network": {"id": "35d696b3-58ec-478c-a919-f64f277bb27a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-475255792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc85bbdab8b44b395f0ebbf88f9df03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d51556-cb", "ovs_interfaceid": "47d51556-cb83-406c-ad00-883c1493aa5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.325418] env[68285]: DEBUG oslo_vmware.api [None 
req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891158, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.438697] env[68285]: DEBUG nova.scheduler.client.report [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 802.505726] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deccb785-2ee9-44bd-93fd-240cd97a9d0d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.532017] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26201cc4-a09c-45ce-af08-e9e796764d4d tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "e3b01f87-6a4c-4127-9204-2bfa5ff28f38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.516s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.533787] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a289b97-6a0e-4c64-bf54-0e2f2e8224d9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.559164] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating instance 'a97df3d2-c182-46d8-95c2-61caccade285' progress to 67 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 802.572753] env[68285]: DEBUG nova.compute.manager [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 802.666655] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52437efd-5e5f-ffbc-8bdf-8dd256aab541, 'name': SearchDatastore_Task, 'duration_secs': 0.0124} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.667303] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b48c61cc-1b11-4084-aa57-47741b93c977 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.672633] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 802.672633] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52509560-87eb-da00-4d43-494345fd45c4" [ 802.672633] env[68285]: _type = "Task" [ 802.672633] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.680536] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52509560-87eb-da00-4d43-494345fd45c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.724920] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Releasing lock "refresh_cache-ee45231a-80f2-49b9-8bc7-03a0c920a668" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.725869] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Instance network_info: |[{"id": "47d51556-cb83-406c-ad00-883c1493aa5f", "address": "fa:16:3e:25:46:44", "network": {"id": "35d696b3-58ec-478c-a919-f64f277bb27a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-475255792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc85bbdab8b44b395f0ebbf88f9df03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d51556-cb", "ovs_interfaceid": "47d51556-cb83-406c-ad00-883c1493aa5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 802.726299] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 
ee45231a-80f2-49b9-8bc7-03a0c920a668] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:46:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e39ca24f-7890-4cdf-8dab-ecab218bb063', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47d51556-cb83-406c-ad00-883c1493aa5f', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 802.736116] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 802.736209] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 802.736414] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93c5aeea-0c16-4258-90dd-0765e4e0598f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.761920] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 802.761920] env[68285]: value = "task-2891160" [ 802.761920] env[68285]: _type = "Task" [ 802.761920] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.769536] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891160, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.828579] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891158, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.945048] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.186s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.948152] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.754s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.948432] env[68285]: DEBUG nova.objects.instance [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Lazy-loading 'resources' on Instance uuid 682c3b6e-a605-486a-86c8-af173d80cbcf {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 802.967693] env[68285]: INFO nova.scheduler.client.report [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Deleted allocations for instance 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa [ 803.038762] env[68285]: DEBUG nova.compute.manager [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 803.096896] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.129910] env[68285]: DEBUG nova.network.neutron [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Port 84cbe58d-a7c4-4c42-9f87-9a6b62805b10 binding to destination host cpu-1 is already ACTIVE {{(pid=68285) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 803.188205] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52509560-87eb-da00-4d43-494345fd45c4, 'name': SearchDatastore_Task, 'duration_secs': 0.010206} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.188851] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.189184] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 81fe4854-1094-4c42-9df5-05325d961146/81fe4854-1094-4c42-9df5-05325d961146.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 803.189717] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e228a3b1-1ece-4fa3-b2c0-0ca9094215bd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.197134] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 803.197134] env[68285]: value = "task-2891161" [ 803.197134] env[68285]: _type = "Task" [ 803.197134] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.207698] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891161, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.276016] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891160, 'name': CreateVM_Task, 'duration_secs': 0.415955} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.276016] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 803.276016] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.276016] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.276016] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 803.276315] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa5dc77a-a0e2-4957-80ea-ba7846912a4b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.280970] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 803.280970] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ec2e59-7059-5e60-2a54-de2cc50843c9" [ 803.280970] env[68285]: _type = "Task" [ 803.280970] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.292159] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ec2e59-7059-5e60-2a54-de2cc50843c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.294997] env[68285]: DEBUG nova.compute.manager [req-336f4960-6bd9-4cf9-850e-ca96c5388fd0 req-c2c9ff32-5aaa-414b-b294-51beaf1bf7e1 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Received event network-changed-4dda7e58-86f1-4d41-ad9e-0f08c3df3241 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 803.295202] env[68285]: DEBUG nova.compute.manager [req-336f4960-6bd9-4cf9-850e-ca96c5388fd0 req-c2c9ff32-5aaa-414b-b294-51beaf1bf7e1 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Refreshing instance network info cache due to event network-changed-4dda7e58-86f1-4d41-ad9e-0f08c3df3241. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 803.295446] env[68285]: DEBUG oslo_concurrency.lockutils [req-336f4960-6bd9-4cf9-850e-ca96c5388fd0 req-c2c9ff32-5aaa-414b-b294-51beaf1bf7e1 service nova] Acquiring lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.295822] env[68285]: DEBUG oslo_concurrency.lockutils [req-336f4960-6bd9-4cf9-850e-ca96c5388fd0 req-c2c9ff32-5aaa-414b-b294-51beaf1bf7e1 service nova] Acquired lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.295908] env[68285]: DEBUG nova.network.neutron [req-336f4960-6bd9-4cf9-850e-ca96c5388fd0 req-c2c9ff32-5aaa-414b-b294-51beaf1bf7e1 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Refreshing network info cache for port 4dda7e58-86f1-4d41-ad9e-0f08c3df3241 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 803.325282] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891158, 'name': CloneVM_Task, 'duration_secs': 1.701182} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.325698] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Created linked-clone VM from snapshot [ 803.326502] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b6cf17-599b-45cc-bcda-52ac39ab9877 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.334984] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Uploading image edbbdcbe-abdd-42f5-9d90-4932e483ae29 {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 803.366334] env[68285]: DEBUG oslo_vmware.rw_handles [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 803.366334] env[68285]: value = "vm-580834" [ 803.366334] env[68285]: _type = "VirtualMachine" [ 803.366334] env[68285]: }. 
{{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 803.366334] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b9f33964-edd4-4df5-84ae-81ac6be31cae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.374228] env[68285]: DEBUG nova.compute.manager [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Received event network-changed-60f03e16-4a3a-44b1-b442-db8e844f18a3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 803.374228] env[68285]: DEBUG nova.compute.manager [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Refreshing instance network info cache due to event network-changed-60f03e16-4a3a-44b1-b442-db8e844f18a3. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 803.374228] env[68285]: DEBUG oslo_concurrency.lockutils [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] Acquiring lock "refresh_cache-81fe4854-1094-4c42-9df5-05325d961146" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.374228] env[68285]: DEBUG oslo_concurrency.lockutils [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] Acquired lock "refresh_cache-81fe4854-1094-4c42-9df5-05325d961146" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.374228] env[68285]: DEBUG nova.network.neutron [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Refreshing network info cache for port 60f03e16-4a3a-44b1-b442-db8e844f18a3 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 803.377708] env[68285]: DEBUG oslo_vmware.rw_handles [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lease: (returnval){ [ 803.377708] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5224e584-e232-09e6-2a88-3f0e5f59ec59" [ 803.377708] env[68285]: _type = "HttpNfcLease" [ 803.377708] env[68285]: } obtained for exporting VM: (result){ [ 803.377708] env[68285]: value = "vm-580834" [ 803.377708] env[68285]: _type = "VirtualMachine" [ 803.377708] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 803.379427] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the lease: (returnval){ [ 803.379427] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5224e584-e232-09e6-2a88-3f0e5f59ec59" [ 803.379427] env[68285]: _type = "HttpNfcLease" [ 803.379427] env[68285]: } to be ready. 
{{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 803.385808] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 803.385808] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5224e584-e232-09e6-2a88-3f0e5f59ec59" [ 803.385808] env[68285]: _type = "HttpNfcLease" [ 803.385808] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 803.386568] env[68285]: DEBUG oslo_vmware.rw_handles [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 803.386568] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5224e584-e232-09e6-2a88-3f0e5f59ec59" [ 803.386568] env[68285]: _type = "HttpNfcLease" [ 803.386568] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 803.388795] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c657f163-2d59-428e-af8a-61fce6a8ed2c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.397860] env[68285]: DEBUG oslo_vmware.rw_handles [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a78090-b909-f429-8b7d-ce22ebdf7beb/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 803.397860] env[68285]: DEBUG oslo_vmware.rw_handles [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a78090-b909-f429-8b7d-ce22ebdf7beb/disk-0.vmdk for reading. 
{{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 803.480327] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9cf6ae83-add2-4c95-b9da-249e996cf151 tempest-ServerDiagnosticsV248Test-1655601150 tempest-ServerDiagnosticsV248Test-1655601150-project-member] Lock "9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.648s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.485787] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "bda5b2fb-1875-4078-a4c1-f76f6abeaaf5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.488059] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "bda5b2fb-1875-4078-a4c1-f76f6abeaaf5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.540043] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0985dacb-51c3-4519-b825-7a0b2de0d020 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.577989] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.715371] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891161, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.793590] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ec2e59-7059-5e60-2a54-de2cc50843c9, 'name': SearchDatastore_Task, 'duration_secs': 0.009887} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.793590] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.793794] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 803.796198] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.796384] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.796584] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 803.799690] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14132419-3358-44e5-8e0e-b31187bc6592 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.814715] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 803.815680] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 803.815869] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15470dc7-dd89-484a-b03c-9c052889d38a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.824017] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 803.824017] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]522b70cd-7bba-5859-dc91-d800705eff06" [ 803.824017] env[68285]: _type = "Task" [ 803.824017] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.847680] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522b70cd-7bba-5859-dc91-d800705eff06, 'name': SearchDatastore_Task, 'duration_secs': 0.015172} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.854120] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7516aa1-0c77-4fe7-8d73-c72e0c25dbae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.864281] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 803.864281] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f1151c-0209-16cb-f345-a6640f4ee6ba" [ 803.864281] env[68285]: _type = "Task" [ 803.864281] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.877243] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f1151c-0209-16cb-f345-a6640f4ee6ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.150556] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4f99ff-8b7b-40be-8a18-26d6f0729490 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.160880] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "a97df3d2-c182-46d8-95c2-61caccade285-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.161784] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "a97df3d2-c182-46d8-95c2-61caccade285-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.161784] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "a97df3d2-c182-46d8-95c2-61caccade285-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.172352] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f40b2d3-78af-41a2-b2c0-0c0cbd1c5eb0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.214914] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6befb162-8b52-493f-8e3b-5669ac781bd3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.225925] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891161, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550063} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.228319] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 81fe4854-1094-4c42-9df5-05325d961146/81fe4854-1094-4c42-9df5-05325d961146.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 804.229132] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 804.229132] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-82dd4705-80b9-4a89-a0b3-ae2d67c5344a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.232278] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a581b862-e4ef-4f15-8c12-629c3b7e7bfb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.250747] env[68285]: DEBUG nova.compute.provider_tree [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.253967] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 804.253967] env[68285]: value = "task-2891163" [ 804.253967] env[68285]: _type = "Task" [ 804.253967] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.255477] env[68285]: DEBUG nova.network.neutron [req-336f4960-6bd9-4cf9-850e-ca96c5388fd0 req-c2c9ff32-5aaa-414b-b294-51beaf1bf7e1 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Updated VIF entry in instance network info cache for port 4dda7e58-86f1-4d41-ad9e-0f08c3df3241. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 804.255986] env[68285]: DEBUG nova.network.neutron [req-336f4960-6bd9-4cf9-850e-ca96c5388fd0 req-c2c9ff32-5aaa-414b-b294-51beaf1bf7e1 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Updating instance_info_cache with network_info: [{"id": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "address": "fa:16:3e:4e:1c:f7", "network": {"id": "e5bd99f4-35a7-4389-ba74-8ae60f642ef1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-161057880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee59d7c8bf9d4e35b0c2e1861f375a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dda7e58-86", "ovs_interfaceid": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.269359] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891163, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.375462] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f1151c-0209-16cb-f345-a6640f4ee6ba, 'name': SearchDatastore_Task, 'duration_secs': 0.013036} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.375744] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 804.376115] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] ee45231a-80f2-49b9-8bc7-03a0c920a668/ee45231a-80f2-49b9-8bc7-03a0c920a668.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 804.376451] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8666625-66a9-4a1c-96b9-ed543ca02265 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.383268] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 804.383268] env[68285]: value = "task-2891164" [ 804.383268] env[68285]: _type = "Task" [ 804.383268] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.392166] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891164, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.756957] env[68285]: DEBUG nova.scheduler.client.report [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 804.763801] env[68285]: DEBUG oslo_concurrency.lockutils [req-336f4960-6bd9-4cf9-850e-ca96c5388fd0 req-c2c9ff32-5aaa-414b-b294-51beaf1bf7e1 service nova] Releasing lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 804.777198] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891163, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.483214} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.777577] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 804.778342] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2fb08cc-1aa1-42e7-8fb1-269f822f3485 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.807606] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] 81fe4854-1094-4c42-9df5-05325d961146/81fe4854-1094-4c42-9df5-05325d961146.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 804.808825] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95a9d5c1-4060-41d2-b238-02e85c5b5ba9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.832621] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 804.832621] env[68285]: value = "task-2891165" [ 804.832621] env[68285]: _type = "Task" [ 804.832621] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.845497] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891165, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.847229] env[68285]: DEBUG nova.network.neutron [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Updated VIF entry in instance network info cache for port 60f03e16-4a3a-44b1-b442-db8e844f18a3. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 804.847439] env[68285]: DEBUG nova.network.neutron [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Updating instance_info_cache with network_info: [{"id": "60f03e16-4a3a-44b1-b442-db8e844f18a3", "address": "fa:16:3e:c1:6b:3c", "network": {"id": "35d696b3-58ec-478c-a919-f64f277bb27a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-475255792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc85bbdab8b44b395f0ebbf88f9df03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60f03e16-4a", "ovs_interfaceid": "60f03e16-4a3a-44b1-b442-db8e844f18a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.899207] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891164, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.260284] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.261334] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.261573] env[68285]: DEBUG nova.network.neutron [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 805.274286] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.324s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.282268] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.325s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.284380] env[68285]: INFO nova.compute.claims [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 805.318303] env[68285]: INFO nova.scheduler.client.report [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Deleted allocations for instance 682c3b6e-a605-486a-86c8-af173d80cbcf [ 805.344354] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891165, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.351285] env[68285]: DEBUG oslo_concurrency.lockutils [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] Releasing lock "refresh_cache-81fe4854-1094-4c42-9df5-05325d961146" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.351285] env[68285]: DEBUG nova.compute.manager [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Received event network-vif-plugged-47d51556-cb83-406c-ad00-883c1493aa5f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 805.351285] env[68285]: DEBUG oslo_concurrency.lockutils [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] Acquiring lock "ee45231a-80f2-49b9-8bc7-03a0c920a668-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.351285] env[68285]: DEBUG oslo_concurrency.lockutils [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] Lock "ee45231a-80f2-49b9-8bc7-03a0c920a668-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.352015] env[68285]: DEBUG oslo_concurrency.lockutils [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] Lock "ee45231a-80f2-49b9-8bc7-03a0c920a668-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.352905] env[68285]: DEBUG nova.compute.manager [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] No waiting events found dispatching network-vif-plugged-47d51556-cb83-406c-ad00-883c1493aa5f {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 805.354247] env[68285]: WARNING nova.compute.manager [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Received unexpected event network-vif-plugged-47d51556-cb83-406c-ad00-883c1493aa5f for instance with vm_state building and task_state spawning. [ 805.354247] env[68285]: DEBUG nova.compute.manager [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Received event network-changed-47d51556-cb83-406c-ad00-883c1493aa5f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 805.354247] env[68285]: DEBUG nova.compute.manager [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Refreshing instance network info cache due to event network-changed-47d51556-cb83-406c-ad00-883c1493aa5f. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 805.354247] env[68285]: DEBUG oslo_concurrency.lockutils [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] Acquiring lock "refresh_cache-ee45231a-80f2-49b9-8bc7-03a0c920a668" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.354247] env[68285]: DEBUG oslo_concurrency.lockutils [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] Acquired lock "refresh_cache-ee45231a-80f2-49b9-8bc7-03a0c920a668" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.354414] env[68285]: DEBUG nova.network.neutron [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Refreshing network info cache for port 47d51556-cb83-406c-ad00-883c1493aa5f {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 805.398366] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891164, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.793328} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.398743] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] ee45231a-80f2-49b9-8bc7-03a0c920a668/ee45231a-80f2-49b9-8bc7-03a0c920a668.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 805.399071] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 805.399392] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95d389db-1243-477e-8d1c-4e0a557083ac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.406086] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 805.406086] env[68285]: value = "task-2891166" [ 805.406086] env[68285]: _type = "Task" [ 805.406086] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.416186] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891166, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.827576] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd9268db-5f95-4300-93bc-5baa35cc77e6 tempest-TenantUsagesTestJSON-1930448485 tempest-TenantUsagesTestJSON-1930448485-project-member] Lock "682c3b6e-a605-486a-86c8-af173d80cbcf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.485s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.847762] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891165, 'name': ReconfigVM_Task, 'duration_secs': 0.714704} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.848868] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Reconfigured VM instance instance-00000013 to attach disk [datastore2] 81fe4854-1094-4c42-9df5-05325d961146/81fe4854-1094-4c42-9df5-05325d961146.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 805.850985] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b85354e-1e80-4578-a429-c2b1c64e0f98 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.862392] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 805.862392] env[68285]: value = "task-2891167" [ 805.862392] env[68285]: _type = "Task" [ 805.862392] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.874398] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891167, 'name': Rename_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.915906] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891166, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067296} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.916332] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 805.917225] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3144f061-b409-4f1a-9d33-c591a85ee437 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.940562] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] ee45231a-80f2-49b9-8bc7-03a0c920a668/ee45231a-80f2-49b9-8bc7-03a0c920a668.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 805.940983] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c2b20a0-64c4-4202-add3-2c940bc00ebd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.966389] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 805.966389] env[68285]: value = "task-2891168" [ 805.966389] env[68285]: _type = "Task" [ 805.966389] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.976826] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891168, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.281906] env[68285]: DEBUG nova.network.neutron [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating instance_info_cache with network_info: [{"id": "84cbe58d-a7c4-4c42-9f87-9a6b62805b10", "address": "fa:16:3e:f3:99:c3", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84cbe58d-a7", "ovs_interfaceid": "84cbe58d-a7c4-4c42-9f87-9a6b62805b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.376661] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891167, 'name': Rename_Task, 'duration_secs': 0.325162} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.378049] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 806.378324] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cf57832-ce60-421c-ba95-5990d21c9cab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.386343] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 806.386343] env[68285]: value = "task-2891169" [ 806.386343] env[68285]: _type = "Task" [ 806.386343] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.387276] env[68285]: DEBUG nova.network.neutron [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Updated VIF entry in instance network info cache for port 47d51556-cb83-406c-ad00-883c1493aa5f. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 806.387617] env[68285]: DEBUG nova.network.neutron [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Updating instance_info_cache with network_info: [{"id": "47d51556-cb83-406c-ad00-883c1493aa5f", "address": "fa:16:3e:25:46:44", "network": {"id": "35d696b3-58ec-478c-a919-f64f277bb27a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-475255792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc85bbdab8b44b395f0ebbf88f9df03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d51556-cb", "ovs_interfaceid": "47d51556-cb83-406c-ad00-883c1493aa5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.401997] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891169, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.477219] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891168, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.789040] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Releasing lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.799353] env[68285]: DEBUG nova.compute.manager [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Received event network-changed-d11df654-f231-443e-aa54-91844bb26c2f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 806.799581] env[68285]: DEBUG nova.compute.manager [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Refreshing instance network info cache due to event network-changed-d11df654-f231-443e-aa54-91844bb26c2f. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 806.799812] env[68285]: DEBUG oslo_concurrency.lockutils [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] Acquiring lock "refresh_cache-9f4b2b94-ec19-4a8e-8663-ab71c417d093" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.799962] env[68285]: DEBUG oslo_concurrency.lockutils [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] Acquired lock "refresh_cache-9f4b2b94-ec19-4a8e-8663-ab71c417d093" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.800142] env[68285]: DEBUG nova.network.neutron [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Refreshing network info cache for port d11df654-f231-443e-aa54-91844bb26c2f {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 806.903906] env[68285]: DEBUG oslo_concurrency.lockutils [req-23043be2-8950-44b0-9a70-97ab9175e6d4 req-e4250612-7e09-48fd-b87c-f479885c1eb7 service nova] Releasing lock "refresh_cache-ee45231a-80f2-49b9-8bc7-03a0c920a668" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.904225] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891169, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.914245] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c493c0-2318-46bc-9e34-8d07b3e28ce9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.923318] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7892e90-3688-4fef-ae13-a57840081474 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.956931] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbe1020-46c0-4cf6-9b93-e4e802c629aa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.966330] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e38f16-285f-4d5a-9c14-d0e95a4b5b58 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.983203] env[68285]: DEBUG nova.compute.provider_tree [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.987806] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891168, 'name': ReconfigVM_Task, 
'duration_secs': 0.753219} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.988678] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Reconfigured VM instance instance-00000014 to attach disk [datastore2] ee45231a-80f2-49b9-8bc7-03a0c920a668/ee45231a-80f2-49b9-8bc7-03a0c920a668.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 806.989659] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bc8c5064-8de1-4566-8b50-0b78fd55aaa4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.998397] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 806.998397] env[68285]: value = "task-2891170" [ 806.998397] env[68285]: _type = "Task" [ 806.998397] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.009014] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891170, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.145649] env[68285]: DEBUG nova.compute.manager [req-3e652868-2d06-49df-a9f8-d6d43be261ae req-b6ba1e61-7082-409b-b528-08ae33a0fd1c service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Received event network-changed-655ee17d-c9b8-43d9-b783-8c0a559a8300 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 807.145649] env[68285]: DEBUG nova.compute.manager [req-3e652868-2d06-49df-a9f8-d6d43be261ae req-b6ba1e61-7082-409b-b528-08ae33a0fd1c service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Refreshing instance network info cache due to event network-changed-655ee17d-c9b8-43d9-b783-8c0a559a8300. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 807.146201] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e652868-2d06-49df-a9f8-d6d43be261ae req-b6ba1e61-7082-409b-b528-08ae33a0fd1c service nova] Acquiring lock "refresh_cache-d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.146344] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e652868-2d06-49df-a9f8-d6d43be261ae req-b6ba1e61-7082-409b-b528-08ae33a0fd1c service nova] Acquired lock "refresh_cache-d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.146488] env[68285]: DEBUG nova.network.neutron [req-3e652868-2d06-49df-a9f8-d6d43be261ae req-b6ba1e61-7082-409b-b528-08ae33a0fd1c service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Refreshing network info cache for port 655ee17d-c9b8-43d9-b783-8c0a559a8300 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 807.335844] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92041dcd-54b2-414c-9391-113b0aa6a3b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.368755] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d88e07f-82fc-4567-b8a0-40fcdb12bfcb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.376592] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating instance 'a97df3d2-c182-46d8-95c2-61caccade285' progress to 83 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 807.400433] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891169, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.434922] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Acquiring lock "d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.435202] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Lock "d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.489711] env[68285]: DEBUG nova.scheduler.client.report [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 807.513174] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891170, 'name': Rename_Task, 'duration_secs': 0.196135} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.513174] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 807.513174] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06835f0b-7df8-4647-a616-bb7ebd76f812 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.523127] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 807.523127] env[68285]: value = "task-2891171" [ 807.523127] env[68285]: _type = "Task" [ 807.523127] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.531602] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891171, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.588250] env[68285]: DEBUG nova.network.neutron [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Updated VIF entry in instance network info cache for port d11df654-f231-443e-aa54-91844bb26c2f. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 807.588822] env[68285]: DEBUG nova.network.neutron [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Updating instance_info_cache with network_info: [{"id": "d11df654-f231-443e-aa54-91844bb26c2f", "address": "fa:16:3e:75:eb:32", "network": {"id": "f3886ae5-9380-4597-a7bd-9f464be650ef", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1859029152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed60286b54da48c7b47b92c6058cb1d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd11df654-f2", "ovs_interfaceid": "d11df654-f231-443e-aa54-91844bb26c2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.884889] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 807.885163] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b1bf528-bd91-4875-aa63-458e57031a29 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.899902] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 807.899902] env[68285]: value = "task-2891172" [ 807.899902] env[68285]: _type = "Task" [ 807.899902] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.908929] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891169, 'name': PowerOnVM_Task, 'duration_secs': 1.388584} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.908929] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 807.908929] env[68285]: INFO nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Took 9.95 seconds to spawn the instance on the hypervisor. [ 807.908929] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 807.910156] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e58f5b6-6daa-491f-80a2-f7b052a62e72 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.916603] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891172, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.998960] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.720s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 807.999254] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 808.006205] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.369s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.013034] env[68285]: INFO nova.compute.claims [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 808.029653] env[68285]: DEBUG nova.network.neutron [req-3e652868-2d06-49df-a9f8-d6d43be261ae req-b6ba1e61-7082-409b-b528-08ae33a0fd1c service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Updated VIF entry in instance network info cache for port 655ee17d-c9b8-43d9-b783-8c0a559a8300. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 808.030044] env[68285]: DEBUG nova.network.neutron [req-3e652868-2d06-49df-a9f8-d6d43be261ae req-b6ba1e61-7082-409b-b528-08ae33a0fd1c service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Updating instance_info_cache with network_info: [{"id": "655ee17d-c9b8-43d9-b783-8c0a559a8300", "address": "fa:16:3e:91:6c:b0", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap655ee17d-c9", "ovs_interfaceid": "655ee17d-c9b8-43d9-b783-8c0a559a8300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.041293] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891171, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.096646] env[68285]: DEBUG oslo_concurrency.lockutils [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] Releasing lock "refresh_cache-9f4b2b94-ec19-4a8e-8663-ab71c417d093" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.096995] env[68285]: DEBUG nova.compute.manager [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Received event network-changed-03219bf0-d5df-4a05-8632-cb282cf3fa2e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 808.097238] env[68285]: DEBUG nova.compute.manager [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Refreshing instance network info cache due to event network-changed-03219bf0-d5df-4a05-8632-cb282cf3fa2e. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 808.097457] env[68285]: DEBUG oslo_concurrency.lockutils [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] Acquiring lock "refresh_cache-e3b01f87-6a4c-4127-9204-2bfa5ff28f38" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.097713] env[68285]: DEBUG oslo_concurrency.lockutils [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] Acquired lock "refresh_cache-e3b01f87-6a4c-4127-9204-2bfa5ff28f38" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.097925] env[68285]: DEBUG nova.network.neutron [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Refreshing network info cache for port 03219bf0-d5df-4a05-8632-cb282cf3fa2e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.414398] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891172, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.438924] env[68285]: INFO nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Took 33.02 seconds to build instance. [ 808.517654] env[68285]: DEBUG nova.compute.utils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 808.523074] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 808.523074] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 808.535491] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891171, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.541861] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e652868-2d06-49df-a9f8-d6d43be261ae req-b6ba1e61-7082-409b-b528-08ae33a0fd1c service nova] Releasing lock "refresh_cache-d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.617336] env[68285]: DEBUG nova.policy [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be02bc9800624e9c8e076b1df97b9bd1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bc85bbdab8b44b395f0ebbf88f9df03', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 808.916125] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891172, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.942395] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "81fe4854-1094-4c42-9df5-05325d961146" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.860s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.022507] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 809.041681] env[68285]: DEBUG nova.network.neutron [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Updated VIF entry in instance network info cache for port 03219bf0-d5df-4a05-8632-cb282cf3fa2e. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 809.042039] env[68285]: DEBUG nova.network.neutron [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Updating instance_info_cache with network_info: [{"id": "03219bf0-d5df-4a05-8632-cb282cf3fa2e", "address": "fa:16:3e:4e:b9:bd", "network": {"id": "24216eb9-67ca-4587-9dce-0239c567b87e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-463162314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0df6f9cd11e4cbea0a5d25e546ade05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03219bf0-d5", "ovs_interfaceid": "03219bf0-d5df-4a05-8632-cb282cf3fa2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.050379] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891171, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.254895] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Successfully created port: 64daa1ff-7232-4d18-9bb9-18f48ce5df20 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 809.300773] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquiring lock "29981c10-c6dd-4852-94ad-1f8f0135b8cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.300773] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Lock "29981c10-c6dd-4852-94ad-1f8f0135b8cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.422279] env[68285]: DEBUG oslo_vmware.api [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891172, 'name': PowerOnVM_Task, 'duration_secs': 1.022111} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.422279] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 809.422279] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cff9973f-f0d0-44ec-b857-2d8eb1211b79 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating instance 'a97df3d2-c182-46d8-95c2-61caccade285' progress to 100 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 809.445464] env[68285]: DEBUG nova.compute.manager [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 809.544834] env[68285]: DEBUG oslo_concurrency.lockutils [req-c9c0a7b1-0f55-4dfa-95b4-33487bacb0d4 req-7b667d75-14f4-435f-a417-375345d58ee2 service nova] Releasing lock "refresh_cache-e3b01f87-6a4c-4127-9204-2bfa5ff28f38" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.554747] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891171, 'name': PowerOnVM_Task, 'duration_secs': 1.657022} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.555804] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 809.555804] env[68285]: INFO nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Took 8.92 seconds to spawn the instance on the hypervisor. [ 809.555804] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 809.559742] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1594c678-e147-4109-a437-254008ff39d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.620785] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11157384-458c-47a6-9db8-04ee3049670f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.634508] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba391fd-a2f8-49ea-9b13-37a8c2cbeb42 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.670755] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43c00a6-9547-402d-b914-0fe3d55dd8c4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.678739] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe2287c3-96d4-4105-abad-97dee3fd10fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.695188] env[68285]: DEBUG nova.compute.provider_tree [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 809.979083] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.047084] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 810.081896] env[68285]: INFO nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Took 32.10 seconds to build instance. [ 810.086528] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 810.086909] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 810.086909] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 810.087535] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 810.087535] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 810.087535] env[68285]: DEBUG 
nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 810.087738] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 810.087787] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 810.088060] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 810.088627] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 810.088827] env[68285]: DEBUG nova.virt.hardware [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 810.092018] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a7457f-feb3-46d3-9a68-7f967ecdfdd4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.100680] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3440616d-87e2-41e3-b0dc-26ec4d51e0e6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.161091] env[68285]: DEBUG nova.compute.manager [req-23f07bec-bd6a-482f-94fc-b397e8f19a6c req-a5b29cf2-c421-4cb9-8620-879ad59b1560 service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Received event network-changed-03219bf0-d5df-4a05-8632-cb282cf3fa2e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 810.161254] env[68285]: DEBUG nova.compute.manager [req-23f07bec-bd6a-482f-94fc-b397e8f19a6c req-a5b29cf2-c421-4cb9-8620-879ad59b1560 service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Refreshing instance network info cache due to event network-changed-03219bf0-d5df-4a05-8632-cb282cf3fa2e. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 810.161453] env[68285]: DEBUG oslo_concurrency.lockutils [req-23f07bec-bd6a-482f-94fc-b397e8f19a6c req-a5b29cf2-c421-4cb9-8620-879ad59b1560 service nova] Acquiring lock "refresh_cache-e3b01f87-6a4c-4127-9204-2bfa5ff28f38" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.161582] env[68285]: DEBUG oslo_concurrency.lockutils [req-23f07bec-bd6a-482f-94fc-b397e8f19a6c req-a5b29cf2-c421-4cb9-8620-879ad59b1560 service nova] Acquired lock "refresh_cache-e3b01f87-6a4c-4127-9204-2bfa5ff28f38" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 810.161743] env[68285]: DEBUG nova.network.neutron [req-23f07bec-bd6a-482f-94fc-b397e8f19a6c req-a5b29cf2-c421-4cb9-8620-879ad59b1560 service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Refreshing network info cache for port 03219bf0-d5df-4a05-8632-cb282cf3fa2e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 810.199336] env[68285]: DEBUG nova.scheduler.client.report [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 810.584081] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "ee45231a-80f2-49b9-8bc7-03a0c920a668" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.452s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.704885] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.699s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.705534] env[68285]: DEBUG nova.compute.manager [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 810.709541] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.724s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.710974] env[68285]: INFO nova.compute.claims [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 811.060484] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Successfully updated port: 64daa1ff-7232-4d18-9bb9-18f48ce5df20 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 811.087128] env[68285]: DEBUG nova.compute.manager [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 811.221584] env[68285]: DEBUG nova.compute.utils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 811.222081] env[68285]: DEBUG nova.compute.manager [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 811.222359] env[68285]: DEBUG nova.network.neutron [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 811.258457] env[68285]: DEBUG nova.network.neutron [req-23f07bec-bd6a-482f-94fc-b397e8f19a6c req-a5b29cf2-c421-4cb9-8620-879ad59b1560 service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Updated VIF entry in instance network info cache for port 03219bf0-d5df-4a05-8632-cb282cf3fa2e. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 811.258938] env[68285]: DEBUG nova.network.neutron [req-23f07bec-bd6a-482f-94fc-b397e8f19a6c req-a5b29cf2-c421-4cb9-8620-879ad59b1560 service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Updating instance_info_cache with network_info: [{"id": "03219bf0-d5df-4a05-8632-cb282cf3fa2e", "address": "fa:16:3e:4e:b9:bd", "network": {"id": "24216eb9-67ca-4587-9dce-0239c567b87e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-463162314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0df6f9cd11e4cbea0a5d25e546ade05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03219bf0-d5", "ovs_interfaceid": "03219bf0-d5df-4a05-8632-cb282cf3fa2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.310213] env[68285]: DEBUG nova.policy [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb08b9cb707a4ba19622181d7019146e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47bd2fd2c1f743e8a6a82e64a7c834e9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 811.568383] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "refresh_cache-0d13cc84-bbf2-4e8b-8344-d69acac6bd35" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.568383] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquired lock "refresh_cache-0d13cc84-bbf2-4e8b-8344-d69acac6bd35" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.568383] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 811.622679] env[68285]: 
DEBUG nova.network.neutron [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Successfully created port: ca2a4f0d-528f-4c7a-a062-e628578f7f7e {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 811.628491] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.733166] env[68285]: DEBUG nova.compute.manager [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 811.769392] env[68285]: DEBUG oslo_concurrency.lockutils [req-23f07bec-bd6a-482f-94fc-b397e8f19a6c req-a5b29cf2-c421-4cb9-8620-879ad59b1560 service nova] Releasing lock "refresh_cache-e3b01f87-6a4c-4127-9204-2bfa5ff28f38" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.048333] env[68285]: DEBUG nova.compute.manager [req-f89faeb5-1f80-4ead-b0e1-af098ce1d1f8 req-9a8b4d67-8835-415b-89a7-f67bdecd52ff service nova] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Received event network-vif-plugged-64daa1ff-7232-4d18-9bb9-18f48ce5df20 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 812.048838] env[68285]: DEBUG oslo_concurrency.lockutils [req-f89faeb5-1f80-4ead-b0e1-af098ce1d1f8 req-9a8b4d67-8835-415b-89a7-f67bdecd52ff service nova] Acquiring lock "0d13cc84-bbf2-4e8b-8344-d69acac6bd35-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.049174] env[68285]: DEBUG oslo_concurrency.lockutils [req-f89faeb5-1f80-4ead-b0e1-af098ce1d1f8 req-9a8b4d67-8835-415b-89a7-f67bdecd52ff service nova] Lock "0d13cc84-bbf2-4e8b-8344-d69acac6bd35-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.049492] env[68285]: DEBUG oslo_concurrency.lockutils [req-f89faeb5-1f80-4ead-b0e1-af098ce1d1f8 req-9a8b4d67-8835-415b-89a7-f67bdecd52ff service nova] Lock "0d13cc84-bbf2-4e8b-8344-d69acac6bd35-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.049744] env[68285]: DEBUG nova.compute.manager [req-f89faeb5-1f80-4ead-b0e1-af098ce1d1f8 req-9a8b4d67-8835-415b-89a7-f67bdecd52ff service nova] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] No waiting events found dispatching network-vif-plugged-64daa1ff-7232-4d18-9bb9-18f48ce5df20 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 812.050080] env[68285]: WARNING nova.compute.manager [req-f89faeb5-1f80-4ead-b0e1-af098ce1d1f8 
req-9a8b4d67-8835-415b-89a7-f67bdecd52ff service nova] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Received unexpected event network-vif-plugged-64daa1ff-7232-4d18-9bb9-18f48ce5df20 for instance with vm_state building and task_state spawning. [ 812.140113] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 812.329383] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86c7db6-5d44-481e-aec6-6d80946a48b8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.338085] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c0a9f6-71d4-413c-83dd-662ac50c97d2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.376824] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63c7060-e8b4-4f17-b927-f18b480b98fd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.385919] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c3b443-200d-4ff7-811b-776d6cce83ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.400165] env[68285]: DEBUG nova.compute.provider_tree [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.452890] env[68285]: DEBUG nova.network.neutron [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Updating instance_info_cache with network_info: [{"id": "64daa1ff-7232-4d18-9bb9-18f48ce5df20", "address": "fa:16:3e:93:4d:09", "network": {"id": "35d696b3-58ec-478c-a919-f64f277bb27a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-475255792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc85bbdab8b44b395f0ebbf88f9df03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64daa1ff-72", "ovs_interfaceid": "64daa1ff-7232-4d18-9bb9-18f48ce5df20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.600188] env[68285]: DEBUG nova.compute.manager [req-a93eb688-51e7-4629-8f4f-4aea802868f0 req-019abe94-bede-45d0-bb6a-519eef1382a0 service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Received event network-changed-d11df654-f231-443e-aa54-91844bb26c2f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 812.600302] env[68285]: DEBUG nova.compute.manager [req-a93eb688-51e7-4629-8f4f-4aea802868f0 req-019abe94-bede-45d0-bb6a-519eef1382a0 service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Refreshing instance network info cache due to event network-changed-d11df654-f231-443e-aa54-91844bb26c2f. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 812.600752] env[68285]: DEBUG oslo_concurrency.lockutils [req-a93eb688-51e7-4629-8f4f-4aea802868f0 req-019abe94-bede-45d0-bb6a-519eef1382a0 service nova] Acquiring lock "refresh_cache-9f4b2b94-ec19-4a8e-8663-ab71c417d093" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.600970] env[68285]: DEBUG oslo_concurrency.lockutils [req-a93eb688-51e7-4629-8f4f-4aea802868f0 req-019abe94-bede-45d0-bb6a-519eef1382a0 service nova] Acquired lock "refresh_cache-9f4b2b94-ec19-4a8e-8663-ab71c417d093" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 812.601318] env[68285]: DEBUG nova.network.neutron [req-a93eb688-51e7-4629-8f4f-4aea802868f0 req-019abe94-bede-45d0-bb6a-519eef1382a0 service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Refreshing network info cache for port d11df654-f231-443e-aa54-91844bb26c2f {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 812.687819] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "324cc3e5-1c81-498e-b520-e9fca26013ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.687819] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "324cc3e5-1c81-498e-b520-e9fca26013ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.748982] env[68285]: DEBUG nova.compute.manager [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 812.774024] env[68285]: DEBUG nova.virt.hardware [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 812.776560] env[68285]: DEBUG nova.virt.hardware [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 812.776560] env[68285]: DEBUG nova.virt.hardware [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 812.776560] env[68285]: DEBUG nova.virt.hardware [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 812.776560] env[68285]: DEBUG nova.virt.hardware [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 812.776560] env[68285]: DEBUG nova.virt.hardware [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 812.777249] env[68285]: DEBUG nova.virt.hardware [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 812.777249] env[68285]: DEBUG nova.virt.hardware [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 812.777249] env[68285]: DEBUG nova.virt.hardware [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 812.777249] env[68285]: DEBUG nova.virt.hardware [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 812.777249] env[68285]: DEBUG nova.virt.hardware [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 812.777525] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5045558a-6726-441b-823d-2283ca652119 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.791622] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed95dff7-7f63-409c-af2b-595d459b7822 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.904265] env[68285]: DEBUG nova.scheduler.client.report [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 812.955597] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Releasing lock "refresh_cache-0d13cc84-bbf2-4e8b-8344-d69acac6bd35" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.955998] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Instance network_info: |[{"id": "64daa1ff-7232-4d18-9bb9-18f48ce5df20", "address": "fa:16:3e:93:4d:09", "network": {"id": "35d696b3-58ec-478c-a919-f64f277bb27a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-475255792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc85bbdab8b44b395f0ebbf88f9df03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64daa1ff-72", "ovs_interfaceid": "64daa1ff-7232-4d18-9bb9-18f48ce5df20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 812.956576] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:4d:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e39ca24f-7890-4cdf-8dab-ecab218bb063', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '64daa1ff-7232-4d18-9bb9-18f48ce5df20', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 812.974042] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 812.978044] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 812.983276] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-603248e2-0679-477d-9103-66071001411e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.011736] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 813.011736] env[68285]: value = "task-2891173" [ 813.011736] env[68285]: _type = "Task" [ 813.011736] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.020855] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891173, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.243406] env[68285]: DEBUG nova.network.neutron [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Successfully updated port: ca2a4f0d-528f-4c7a-a062-e628578f7f7e {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 813.410778] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.701s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.411623] env[68285]: DEBUG nova.compute.manager [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 813.420210] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.494s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.422643] env[68285]: INFO nova.compute.claims [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 813.507158] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "a97df3d2-c182-46d8-95c2-61caccade285" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.507447] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "a97df3d2-c182-46d8-95c2-61caccade285" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.507636] env[68285]: DEBUG nova.compute.manager [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Going to confirm migration 1 {{(pid=68285) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 813.523230] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891173, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.706185] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Acquiring lock "753bb2f7-bf0a-401e-81af-93982558d3b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.706185] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Lock "753bb2f7-bf0a-401e-81af-93982558d3b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.714603] env[68285]: DEBUG nova.network.neutron [req-a93eb688-51e7-4629-8f4f-4aea802868f0 req-019abe94-bede-45d0-bb6a-519eef1382a0 service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Updated VIF entry in instance network info cache for port d11df654-f231-443e-aa54-91844bb26c2f. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 813.714603] env[68285]: DEBUG nova.network.neutron [req-a93eb688-51e7-4629-8f4f-4aea802868f0 req-019abe94-bede-45d0-bb6a-519eef1382a0 service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Updating instance_info_cache with network_info: [{"id": "d11df654-f231-443e-aa54-91844bb26c2f", "address": "fa:16:3e:75:eb:32", "network": {"id": "f3886ae5-9380-4597-a7bd-9f464be650ef", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1859029152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed60286b54da48c7b47b92c6058cb1d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd11df654-f2", "ovs_interfaceid": "d11df654-f231-443e-aa54-91844bb26c2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.748477] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "refresh_cache-f0145d64-60e4-4ad5-a6ea-6c5d40780df5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.748570] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 
tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquired lock "refresh_cache-f0145d64-60e4-4ad5-a6ea-6c5d40780df5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.748689] env[68285]: DEBUG nova.network.neutron [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 814.062619] env[68285]: DEBUG nova.compute.utils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 814.071592] env[68285]: DEBUG nova.compute.manager [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 814.071991] env[68285]: DEBUG nova.network.neutron [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 814.085675] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891173, 'name': CreateVM_Task, 'duration_secs': 0.569706} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.085974] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 814.088174] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.088174] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.088174] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 814.088807] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccc75464-355b-4cdc-8c67-2895ec431d91 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.094858] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 814.094858] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b283e3-0434-a90e-48b8-05cc2dde6e3e" [ 814.094858] env[68285]: _type = "Task" [ 814.094858] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.106928] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b283e3-0434-a90e-48b8-05cc2dde6e3e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.139537] env[68285]: DEBUG nova.policy [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20f736434c6b457c9ce87771ace6a728', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5fd7bc7649b647939584cc01c1f3b5d0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 814.178805] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.178997] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.179210] env[68285]: DEBUG nova.network.neutron [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 814.179445] env[68285]: DEBUG nova.objects.instance [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lazy-loading 'info_cache' on Instance uuid a97df3d2-c182-46d8-95c2-61caccade285 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 814.217897] env[68285]: DEBUG oslo_concurrency.lockutils [req-a93eb688-51e7-4629-8f4f-4aea802868f0 req-019abe94-bede-45d0-bb6a-519eef1382a0 service nova] Releasing lock "refresh_cache-9f4b2b94-ec19-4a8e-8663-ab71c417d093" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.288290] env[68285]: DEBUG nova.network.neutron [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 814.511501] env[68285]: DEBUG nova.network.neutron [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Updating instance_info_cache with network_info: [{"id": "ca2a4f0d-528f-4c7a-a062-e628578f7f7e", "address": "fa:16:3e:37:05:c9", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.78", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca2a4f0d-52", "ovs_interfaceid": "ca2a4f0d-528f-4c7a-a062-e628578f7f7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.519777] env[68285]: DEBUG nova.network.neutron [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Successfully created port: 462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 814.574900] env[68285]: DEBUG nova.compute.manager [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 814.612109] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b283e3-0434-a90e-48b8-05cc2dde6e3e, 'name': SearchDatastore_Task, 'duration_secs': 0.016103} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.612863] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.613307] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 814.613813] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.616043] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.616043] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 814.616043] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4e26534-a0d8-465f-af37-507b278e30bf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.626767] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 814.627288] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 814.629083] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d7c0331-9c21-4e93-9491-405933d925f5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.639173] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 814.639173] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529556e3-9c9c-500c-e087-7bb228a9a440" [ 814.639173] env[68285]: _type = "Task" [ 814.639173] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.652601] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529556e3-9c9c-500c-e087-7bb228a9a440, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.693607] env[68285]: DEBUG nova.compute.manager [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Received event network-changed-64daa1ff-7232-4d18-9bb9-18f48ce5df20 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 814.693799] env[68285]: DEBUG nova.compute.manager [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Refreshing instance network info cache due to event network-changed-64daa1ff-7232-4d18-9bb9-18f48ce5df20. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 814.694093] env[68285]: DEBUG oslo_concurrency.lockutils [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] Acquiring lock "refresh_cache-0d13cc84-bbf2-4e8b-8344-d69acac6bd35" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.694997] env[68285]: DEBUG oslo_concurrency.lockutils [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] Acquired lock "refresh_cache-0d13cc84-bbf2-4e8b-8344-d69acac6bd35" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.694997] env[68285]: DEBUG nova.network.neutron [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Refreshing network info cache for port 64daa1ff-7232-4d18-9bb9-18f48ce5df20 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 814.836031] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Acquiring lock "9f4b2b94-ec19-4a8e-8663-ab71c417d093" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.836370] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Lock "9f4b2b94-ec19-4a8e-8663-ab71c417d093" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.836370] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Acquiring lock "9f4b2b94-ec19-4a8e-8663-ab71c417d093-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.836474] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Lock "9f4b2b94-ec19-4a8e-8663-ab71c417d093-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.836620] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Lock "9f4b2b94-ec19-4a8e-8663-ab71c417d093-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.838710] env[68285]: INFO nova.compute.manager [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Terminating instance [ 814.970332] env[68285]: DEBUG oslo_vmware.rw_handles [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a78090-b909-f429-8b7d-ce22ebdf7beb/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 814.972165] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d1dd5c-85a8-4a77-b78b-022b7c0d925c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.980343] env[68285]: DEBUG oslo_vmware.rw_handles [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a78090-b909-f429-8b7d-ce22ebdf7beb/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 814.980520] env[68285]: ERROR oslo_vmware.rw_handles [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a78090-b909-f429-8b7d-ce22ebdf7beb/disk-0.vmdk due to incomplete transfer. [ 814.980743] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a4a2d810-fea0-4e24-957d-017578661af4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.990242] env[68285]: DEBUG oslo_vmware.rw_handles [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a78090-b909-f429-8b7d-ce22ebdf7beb/disk-0.vmdk. 
{{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 814.990625] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Uploaded image edbbdcbe-abdd-42f5-9d90-4932e483ae29 to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 814.992568] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 814.992851] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-086637e5-5243-48e9-9269-eabda8a6f5c6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.999551] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 814.999551] env[68285]: value = "task-2891174" [ 814.999551] env[68285]: _type = "Task" [ 814.999551] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.011463] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891174, 'name': Destroy_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.016130] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Releasing lock "refresh_cache-f0145d64-60e4-4ad5-a6ea-6c5d40780df5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.016428] env[68285]: DEBUG nova.compute.manager [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Instance network_info: |[{"id": "ca2a4f0d-528f-4c7a-a062-e628578f7f7e", "address": "fa:16:3e:37:05:c9", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.78", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca2a4f0d-52", "ovs_interfaceid": "ca2a4f0d-528f-4c7a-a062-e628578f7f7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 815.017056] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:05:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca2a4f0d-528f-4c7a-a062-e628578f7f7e', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 815.027096] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 815.027096] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 815.027096] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a7138c4-d0ef-4980-aa14-ef94de66fb10 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.048876] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 815.048876] env[68285]: value = "task-2891175" [ 815.048876] env[68285]: _type = "Task" [ 815.048876] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.056308] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891175, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.147349] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529556e3-9c9c-500c-e087-7bb228a9a440, 'name': SearchDatastore_Task, 'duration_secs': 0.023697} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.148178] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7217e00-8b04-4524-923b-6428efa2b79f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.155398] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 815.155398] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d4ac85-3020-5c6a-c723-68efc5caa2a8" [ 815.155398] env[68285]: _type = "Task" [ 815.155398] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.166164] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d4ac85-3020-5c6a-c723-68efc5caa2a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.185237] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b2773f-1a2a-450a-8dff-9e1beb0ea6fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.196220] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73baf79b-c9a1-4c85-8215-ee96824248dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.230494] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a26eb39-f633-48c5-ab33-30a5ab1fdacf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.238436] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae25082d-4bd7-4789-a16c-11269857f3f5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.259381] env[68285]: DEBUG nova.compute.provider_tree [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.342592] env[68285]: DEBUG nova.compute.manager [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 815.344058] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 815.345067] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10776f7-18cf-4967-88a2-0d5ec8415577 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.353841] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 815.354131] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f94c296-f94e-4865-9f3d-b4297a6a7f0d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.361020] env[68285]: DEBUG oslo_vmware.api [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Waiting for the task: (returnval){ [ 815.361020] env[68285]: value = "task-2891176" [ 815.361020] env[68285]: _type = "Task" [ 815.361020] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.374908] env[68285]: DEBUG oslo_vmware.api [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891176, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.514583] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891174, 'name': Destroy_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.558433] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891175, 'name': CreateVM_Task, 'duration_secs': 0.441372} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.558676] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 815.559499] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.559723] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 815.560095] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 815.560420] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11658a34-e81e-49fc-8355-320975610591 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.564718] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 815.564718] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52bd5d03-ddfe-5a86-ab2f-536f8795d5f4" [ 815.564718] env[68285]: _type = "Task" [ 815.564718] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.572697] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bd5d03-ddfe-5a86-ab2f-536f8795d5f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.584981] env[68285]: DEBUG nova.compute.manager [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 815.610169] env[68285]: DEBUG nova.virt.hardware [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 815.610502] env[68285]: DEBUG nova.virt.hardware [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 815.610712] env[68285]: DEBUG nova.virt.hardware [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 815.610935] env[68285]: DEBUG nova.virt.hardware [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 815.611127] env[68285]: DEBUG nova.virt.hardware [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 815.611395] env[68285]: DEBUG nova.virt.hardware [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 815.611592] env[68285]: DEBUG nova.virt.hardware [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 815.611821] env[68285]: DEBUG nova.virt.hardware [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 815.612184] env[68285]: DEBUG nova.virt.hardware [None 
req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 815.612246] env[68285]: DEBUG nova.virt.hardware [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 815.612493] env[68285]: DEBUG nova.virt.hardware [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 815.616039] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67f167b-94ec-4f75-be33-ed4e67152933 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.624890] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db52ced-384e-4b26-9890-393983da7536 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.666772] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d4ac85-3020-5c6a-c723-68efc5caa2a8, 'name': SearchDatastore_Task, 'duration_secs': 0.01336} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.666772] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.666772] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 0d13cc84-bbf2-4e8b-8344-d69acac6bd35/0d13cc84-bbf2-4e8b-8344-d69acac6bd35.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 815.666772] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee8bf588-068b-4e8e-8cec-9253710d8133 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.674550] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 815.674550] env[68285]: value = "task-2891177" [ 815.674550] env[68285]: _type = "Task" [ 815.674550] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.680838] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891177, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.721614] env[68285]: DEBUG nova.network.neutron [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Updated VIF entry in instance network info cache for port 64daa1ff-7232-4d18-9bb9-18f48ce5df20. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 815.722063] env[68285]: DEBUG nova.network.neutron [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Updating instance_info_cache with network_info: [{"id": "64daa1ff-7232-4d18-9bb9-18f48ce5df20", "address": "fa:16:3e:93:4d:09", "network": {"id": "35d696b3-58ec-478c-a919-f64f277bb27a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-475255792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc85bbdab8b44b395f0ebbf88f9df03", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64daa1ff-72", "ovs_interfaceid": "64daa1ff-7232-4d18-9bb9-18f48ce5df20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.724239] env[68285]: DEBUG nova.network.neutron [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating instance_info_cache with network_info: [{"id": "84cbe58d-a7c4-4c42-9f87-9a6b62805b10", "address": "fa:16:3e:f3:99:c3", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84cbe58d-a7", "ovs_interfaceid": "84cbe58d-a7c4-4c42-9f87-9a6b62805b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.762138] env[68285]: DEBUG nova.scheduler.client.report [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 815.876454] env[68285]: DEBUG oslo_vmware.api [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891176, 'name': PowerOffVM_Task, 'duration_secs': 0.496283} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.876732] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 815.876964] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 815.877245] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed0a2f31-8c95-4676-8ed1-4fcf0ff4be75 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.947650] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 815.947983] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 815.948289] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Deleting the datastore file [datastore1] 9f4b2b94-ec19-4a8e-8663-ab71c417d093 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 815.948577] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9bfc1204-541a-4574-8861-9e0b081b30ec {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.956148] env[68285]: DEBUG oslo_vmware.api [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Waiting for 
the task: (returnval){ [ 815.956148] env[68285]: value = "task-2891179" [ 815.956148] env[68285]: _type = "Task" [ 815.956148] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.968858] env[68285]: DEBUG oslo_vmware.api [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891179, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.016453] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891174, 'name': Destroy_Task, 'duration_secs': 0.536224} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.016676] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Destroyed the VM [ 816.017115] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 816.017481] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-54d45607-19c8-446f-8bfd-86492147e231 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.028855] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 816.028855] env[68285]: value = "task-2891180" [ 816.028855] env[68285]: _type = "Task" [ 816.028855] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.039052] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891180, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.078122] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bd5d03-ddfe-5a86-ab2f-536f8795d5f4, 'name': SearchDatastore_Task, 'duration_secs': 0.014903} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.079190] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.079524] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 816.079875] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.079962] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.080493] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 816.081152] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad25f596-f6da-4d2c-89f9-51ac79d7acae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.098461] env[68285]: DEBUG nova.network.neutron [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Successfully updated port: 462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 816.102458] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 816.102458] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 816.102976] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8be8a50f-7376-41ed-b538-1a96a2656618 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.110989] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 816.110989] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521b2c33-f8a4-634a-8f3c-54c0a966bd95" [ 816.110989] env[68285]: _type = "Task" [ 816.110989] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.123155] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521b2c33-f8a4-634a-8f3c-54c0a966bd95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.188268] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891177, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.226705] env[68285]: DEBUG oslo_concurrency.lockutils [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] Releasing lock "refresh_cache-0d13cc84-bbf2-4e8b-8344-d69acac6bd35" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.227010] env[68285]: DEBUG nova.compute.manager [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Received event network-vif-plugged-ca2a4f0d-528f-4c7a-a062-e628578f7f7e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 816.227229] env[68285]: DEBUG oslo_concurrency.lockutils [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] Acquiring lock "f0145d64-60e4-4ad5-a6ea-6c5d40780df5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.227439] env[68285]: DEBUG oslo_concurrency.lockutils [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] Lock "f0145d64-60e4-4ad5-a6ea-6c5d40780df5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.227607] env[68285]: DEBUG oslo_concurrency.lockutils [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] Lock "f0145d64-60e4-4ad5-a6ea-6c5d40780df5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.227774] env[68285]: DEBUG nova.compute.manager [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] No waiting events found dispatching network-vif-plugged-ca2a4f0d-528f-4c7a-a062-e628578f7f7e {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 816.227976] env[68285]: WARNING nova.compute.manager [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Received unexpected event network-vif-plugged-ca2a4f0d-528f-4c7a-a062-e628578f7f7e for instance with vm_state building and task_state spawning. [ 816.228177] env[68285]: DEBUG nova.compute.manager [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Received event network-changed-ca2a4f0d-528f-4c7a-a062-e628578f7f7e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 816.228334] env[68285]: DEBUG nova.compute.manager [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Refreshing instance network info cache due to event network-changed-ca2a4f0d-528f-4c7a-a062-e628578f7f7e. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 816.228517] env[68285]: DEBUG oslo_concurrency.lockutils [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] Acquiring lock "refresh_cache-f0145d64-60e4-4ad5-a6ea-6c5d40780df5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.228680] env[68285]: DEBUG oslo_concurrency.lockutils [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] Acquired lock "refresh_cache-f0145d64-60e4-4ad5-a6ea-6c5d40780df5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.228799] env[68285]: DEBUG nova.network.neutron [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Refreshing network info cache for port ca2a4f0d-528f-4c7a-a062-e628578f7f7e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 816.234088] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Releasing lock "refresh_cache-a97df3d2-c182-46d8-95c2-61caccade285" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.234341] env[68285]: DEBUG nova.objects.instance [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lazy-loading 'migration_context' on Instance uuid a97df3d2-c182-46d8-95c2-61caccade285 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 816.270233] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "compute_resources" "released" 
by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.853s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.270948] env[68285]: DEBUG nova.compute.manager [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 816.274916] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.208s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.282612] env[68285]: INFO nova.compute.claims [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.466438] env[68285]: DEBUG oslo_vmware.api [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Task: {'id': task-2891179, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.507561} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.466864] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 816.466971] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 816.467471] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 816.467471] env[68285]: INFO nova.compute.manager [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 816.467684] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 816.467923] env[68285]: DEBUG nova.compute.manager [-] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 816.468111] env[68285]: DEBUG nova.network.neutron [-] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 816.542838] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891180, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.604236] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.604236] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquired lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.604236] env[68285]: DEBUG nova.network.neutron [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 816.622171] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521b2c33-f8a4-634a-8f3c-54c0a966bd95, 'name': SearchDatastore_Task, 'duration_secs': 0.069572} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.622988] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ba7620e-a3b4-4e3b-8442-bdf783620699 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.628984] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 816.628984] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]526e238a-b877-e3d3-834c-c06a6a11949d" [ 816.628984] env[68285]: _type = "Task" [ 816.628984] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.638318] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]526e238a-b877-e3d3-834c-c06a6a11949d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.691418] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891177, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.640631} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.691418] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 0d13cc84-bbf2-4e8b-8344-d69acac6bd35/0d13cc84-bbf2-4e8b-8344-d69acac6bd35.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 816.691418] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 816.691418] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5589e210-d52e-4162-bc4c-fff38983dfee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.697547] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 816.697547] env[68285]: value = "task-2891181" [ 816.697547] env[68285]: _type = "Task" [ 816.697547] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.707680] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891181, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.741025] env[68285]: DEBUG nova.objects.base [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 816.742180] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788ac9d0-b244-4183-b066-b5080815a14b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.769393] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25e1f0e8-a4c5-4895-9195-4d601db89018 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.781454] env[68285]: DEBUG oslo_vmware.api [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 816.781454] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52025322-777c-2329-bd02-bd978e36ef6b" [ 816.781454] env[68285]: _type = "Task" [ 816.781454] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.798873] env[68285]: DEBUG nova.compute.utils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 816.798873] env[68285]: DEBUG oslo_vmware.api [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52025322-777c-2329-bd02-bd978e36ef6b, 'name': SearchDatastore_Task, 'duration_secs': 0.007989} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.798873] env[68285]: DEBUG nova.compute.manager [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 816.798873] env[68285]: DEBUG nova.network.neutron [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 816.801080] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.921214] env[68285]: DEBUG nova.policy [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91e07beda91348ff873672d51166c05e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53261bb9432948b58692227101a4717b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 817.048050] env[68285]: DEBUG oslo_vmware.api [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891180, 'name': RemoveSnapshot_Task, 'duration_secs': 0.816912} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.048050] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 817.050788] env[68285]: INFO nova.compute.manager [None req-ab98e372-2b7d-405f-8128-a86a4c7a87b9 tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Took 17.85 seconds to snapshot the instance on the hypervisor. [ 817.139463] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]526e238a-b877-e3d3-834c-c06a6a11949d, 'name': SearchDatastore_Task, 'duration_secs': 0.00931} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.139750] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.140123] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] f0145d64-60e4-4ad5-a6ea-6c5d40780df5/f0145d64-60e4-4ad5-a6ea-6c5d40780df5.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 817.140551] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02d7e313-e75d-4d37-9c57-d648bec4da9d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.160240] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 817.160240] env[68285]: value = "task-2891182" [ 817.160240] env[68285]: _type = "Task" [ 817.160240] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.173795] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891182, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.198897] env[68285]: DEBUG nova.network.neutron [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.212601] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891181, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073646} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.215414] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 817.216328] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96667eec-0885-4463-82bc-ccfcd9dbbe0b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.248288] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 0d13cc84-bbf2-4e8b-8344-d69acac6bd35/0d13cc84-bbf2-4e8b-8344-d69acac6bd35.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 817.249561] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4cceb1b0-1baf-4772-9051-116d7fc8641d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.271357] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 817.271357] env[68285]: value = "task-2891183" [ 817.271357] env[68285]: _type = "Task" [ 817.271357] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.279932] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891183, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.306641] env[68285]: DEBUG nova.compute.manager [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 817.313642] env[68285]: DEBUG nova.compute.manager [req-17df6ecf-b93b-484d-b069-c79bcd0bc021 req-55b2d7f3-53ed-49f6-80df-d0520c3b0843 service nova] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Received event network-vif-plugged-462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 817.313642] env[68285]: DEBUG oslo_concurrency.lockutils [req-17df6ecf-b93b-484d-b069-c79bcd0bc021 req-55b2d7f3-53ed-49f6-80df-d0520c3b0843 service nova] Acquiring lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.313642] env[68285]: DEBUG oslo_concurrency.lockutils [req-17df6ecf-b93b-484d-b069-c79bcd0bc021 req-55b2d7f3-53ed-49f6-80df-d0520c3b0843 service nova] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.313778] env[68285]: DEBUG oslo_concurrency.lockutils [req-17df6ecf-b93b-484d-b069-c79bcd0bc021 req-55b2d7f3-53ed-49f6-80df-d0520c3b0843 service nova] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.315057] env[68285]: DEBUG nova.compute.manager [req-17df6ecf-b93b-484d-b069-c79bcd0bc021 req-55b2d7f3-53ed-49f6-80df-d0520c3b0843 service nova] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] No waiting events found dispatching network-vif-plugged-462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 817.315057] env[68285]: WARNING nova.compute.manager [req-17df6ecf-b93b-484d-b069-c79bcd0bc021 req-55b2d7f3-53ed-49f6-80df-d0520c3b0843 service nova] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Received unexpected event network-vif-plugged-462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1 for instance with vm_state building and task_state spawning. [ 817.315057] env[68285]: DEBUG nova.compute.manager [req-17df6ecf-b93b-484d-b069-c79bcd0bc021 req-55b2d7f3-53ed-49f6-80df-d0520c3b0843 service nova] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Received event network-changed-462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 817.315057] env[68285]: DEBUG nova.compute.manager [req-17df6ecf-b93b-484d-b069-c79bcd0bc021 req-55b2d7f3-53ed-49f6-80df-d0520c3b0843 service nova] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Refreshing instance network info cache due to event network-changed-462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 817.315057] env[68285]: DEBUG oslo_concurrency.lockutils [req-17df6ecf-b93b-484d-b069-c79bcd0bc021 req-55b2d7f3-53ed-49f6-80df-d0520c3b0843 service nova] Acquiring lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.500253] env[68285]: DEBUG nova.network.neutron [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Updating instance_info_cache with network_info: [{"id": "462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1", "address": "fa:16:3e:c7:6a:21", "network": {"id": "43282131-363f-42f6-b208-74cfe0d8a7c2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-166704782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fd7bc7649b647939584cc01c1f3b5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap462b7f0c-cb", "ovs_interfaceid": "462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.539835] env[68285]: DEBUG nova.network.neutron [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Updated VIF entry in instance network info cache for port ca2a4f0d-528f-4c7a-a062-e628578f7f7e. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 817.540321] env[68285]: DEBUG nova.network.neutron [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Updating instance_info_cache with network_info: [{"id": "ca2a4f0d-528f-4c7a-a062-e628578f7f7e", "address": "fa:16:3e:37:05:c9", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.78", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca2a4f0d-52", "ovs_interfaceid": "ca2a4f0d-528f-4c7a-a062-e628578f7f7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.675712] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891182, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.691690] env[68285]: DEBUG nova.network.neutron [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Successfully created port: 320c995b-dad7-40a2-90c1-1e0f3065e6cb {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 817.784820] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891183, 'name': ReconfigVM_Task, 'duration_secs': 0.373178} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.788105] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 0d13cc84-bbf2-4e8b-8344-d69acac6bd35/0d13cc84-bbf2-4e8b-8344-d69acac6bd35.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 817.789985] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c8804c5b-7f32-438c-8f32-998ea9501729 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.796942] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 817.796942] env[68285]: value = "task-2891184" [ 817.796942] env[68285]: _type = "Task" [ 817.796942] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.816668] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891184, 'name': Rename_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.942862] env[68285]: DEBUG nova.network.neutron [-] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.001338] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cf5e49-722b-4740-a4e5-e919bf1311f9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.004523] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Releasing lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.004831] env[68285]: DEBUG nova.compute.manager [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Instance network_info: |[{"id": "462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1", "address": "fa:16:3e:c7:6a:21", "network": {"id": "43282131-363f-42f6-b208-74cfe0d8a7c2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-166704782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fd7bc7649b647939584cc01c1f3b5d0", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap462b7f0c-cb", "ovs_interfaceid": "462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 818.005140] env[68285]: DEBUG oslo_concurrency.lockutils [req-17df6ecf-b93b-484d-b069-c79bcd0bc021 req-55b2d7f3-53ed-49f6-80df-d0520c3b0843 service nova] Acquired lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.005322] env[68285]: DEBUG nova.network.neutron [req-17df6ecf-b93b-484d-b069-c79bcd0bc021 req-55b2d7f3-53ed-49f6-80df-d0520c3b0843 service nova] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Refreshing network info cache for port 462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 818.006665] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:6a:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f499bc9-78da-46c1-9274-19edf26d31cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 818.014060] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Creating folder: Project (5fd7bc7649b647939584cc01c1f3b5d0). Parent ref: group-v580775. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 818.015665] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c49671c7-8ee4-4ec1-a626-baecc3d39f7e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.020919] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad35d63-34bf-4547-9d4e-0c9fb46bf4c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.053270] env[68285]: DEBUG oslo_concurrency.lockutils [req-7a1f5d45-91ab-45b8-bfa5-d52eec3dad65 req-185786db-18bd-4fd5-9214-9ca998d3db01 service nova] Releasing lock "refresh_cache-f0145d64-60e4-4ad5-a6ea-6c5d40780df5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.055627] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec25d42-5751-4771-b8d6-a2cc9fb228a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.058349] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Created folder: Project (5fd7bc7649b647939584cc01c1f3b5d0) in parent group-v580775. [ 818.058560] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Creating folder: Instances. Parent ref: group-v580838. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 818.058751] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f2e9397-d07d-47aa-a58a-43ff9e71b103 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.065606] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5ebd0a-6d9c-4b5f-aec5-f6bea38c01d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.071143] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Created folder: Instances in parent group-v580838. [ 818.071233] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 818.071844] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 818.071951] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-572bab28-052f-4b3e-b995-897b2e54765c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.096339] env[68285]: DEBUG nova.compute.provider_tree [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 818.102761] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 818.102761] env[68285]: value = "task-2891187" [ 818.102761] env[68285]: _type = "Task" [ 818.102761] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.110665] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891187, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.171713] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891182, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.60342} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.171713] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] f0145d64-60e4-4ad5-a6ea-6c5d40780df5/f0145d64-60e4-4ad5-a6ea-6c5d40780df5.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 818.171713] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 818.172367] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5df885a8-dbca-4091-b55c-b0e740fd7b98 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.178590] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 818.178590] env[68285]: value = "task-2891188" [ 818.178590] env[68285]: _type = "Task" [ 818.178590] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.187210] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891188, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.306506] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891184, 'name': Rename_Task, 'duration_secs': 0.316433} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.306766] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 818.307138] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-910e13d3-8f18-4fc5-b314-f1f1598e3b9b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.313480] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 818.313480] env[68285]: value = "task-2891189" [ 818.313480] env[68285]: _type = "Task" [ 818.313480] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.321751] env[68285]: DEBUG nova.compute.manager [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 818.323549] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891189, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.343146] env[68285]: DEBUG nova.virt.hardware [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 818.343330] env[68285]: DEBUG nova.virt.hardware [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.343526] env[68285]: DEBUG nova.virt.hardware [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 818.343621] env[68285]: DEBUG nova.virt.hardware [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.343889] env[68285]: DEBUG nova.virt.hardware [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 818.344128] env[68285]: DEBUG nova.virt.hardware [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 818.344347] env[68285]: DEBUG nova.virt.hardware [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 818.344527] env[68285]: DEBUG nova.virt.hardware [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 818.344727] env[68285]: DEBUG nova.virt.hardware [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 818.344903] env[68285]: DEBUG nova.virt.hardware [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 818.345129] env[68285]: DEBUG nova.virt.hardware [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 818.346113] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef3527b-9de8-4afc-b253-0a6b4af6fee4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.354914] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff4ffd1-12b8-4d0e-b7da-9469c6a54dae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.448738] env[68285]: INFO nova.compute.manager [-] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Took 1.98 seconds to deallocate network for instance. [ 818.615983] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891187, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.626762] env[68285]: ERROR nova.scheduler.client.report [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [req-f48d2404-9d52-4fb8-8fbb-4c4b806ba97c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f48d2404-9d52-4fb8-8fbb-4c4b806ba97c"}]} [ 818.652777] env[68285]: DEBUG nova.scheduler.client.report [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 818.669898] env[68285]: DEBUG nova.scheduler.client.report [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 818.670107] env[68285]: DEBUG nova.compute.provider_tree [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 818.688850] env[68285]: DEBUG nova.scheduler.client.report [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 818.693190] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891188, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082514} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.693436] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 818.694307] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cceb29-86f0-455c-8ca5-a1e556cd9a23 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.721950] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] f0145d64-60e4-4ad5-a6ea-6c5d40780df5/f0145d64-60e4-4ad5-a6ea-6c5d40780df5.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 818.723453] env[68285]: DEBUG nova.scheduler.client.report [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 818.729342] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-389b285e-83c8-48cf-ab0f-0f81207f591a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.749347] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 818.749347] env[68285]: value = "task-2891190" [ 818.749347] env[68285]: _type = "Task" [ 818.749347] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.759694] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891190, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.828461] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891189, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.928824] env[68285]: DEBUG nova.network.neutron [req-17df6ecf-b93b-484d-b069-c79bcd0bc021 req-55b2d7f3-53ed-49f6-80df-d0520c3b0843 service nova] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Updated VIF entry in instance network info cache for port 462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 818.928824] env[68285]: DEBUG nova.network.neutron [req-17df6ecf-b93b-484d-b069-c79bcd0bc021 req-55b2d7f3-53ed-49f6-80df-d0520c3b0843 service nova] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Updating instance_info_cache with network_info: [{"id": "462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1", "address": "fa:16:3e:c7:6a:21", "network": {"id": "43282131-363f-42f6-b208-74cfe0d8a7c2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-166704782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fd7bc7649b647939584cc01c1f3b5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap462b7f0c-cb", "ovs_interfaceid": "462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.955531] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.114955] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891187, 'name': CreateVM_Task, 'duration_secs': 0.625567} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.115280] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 819.116120] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.116328] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.116642] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 819.116917] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faf8fe8a-edbe-4661-8a50-4aba8293601b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.124386] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 819.124386] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525004ef-a58e-4788-91d1-786148fdf3b9" [ 819.124386] env[68285]: _type = "Task" [ 819.124386] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.132415] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525004ef-a58e-4788-91d1-786148fdf3b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.262566] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891190, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.325177] env[68285]: DEBUG oslo_vmware.api [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891189, 'name': PowerOnVM_Task, 'duration_secs': 0.634665} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.325524] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 819.325652] env[68285]: INFO nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Took 9.28 seconds to spawn the instance on the hypervisor. [ 819.325822] env[68285]: DEBUG nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 819.326606] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68452897-de2e-4182-8404-d8ee51d47fea {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.336940] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc2b37b-4bf4-4e16-8221-5aa45f9bfb5b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.343792] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16036320-5a69-4569-9b32-75920e01c080 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.377179] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc1ee53-4627-41be-83c9-2641155742a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.385428] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91af2ccf-4cb1-4804-8d45-6a9368816eae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.390109] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Acquiring lock "ef0636f4-3149-44e8-a4a3-62b9ede5dc28" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.390350] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lock "ef0636f4-3149-44e8-a4a3-62b9ede5dc28" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.390563] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e834cb5-9a16-4f2c-a188-9e381445233e 
tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Acquiring lock "ef0636f4-3149-44e8-a4a3-62b9ede5dc28-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.390743] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lock "ef0636f4-3149-44e8-a4a3-62b9ede5dc28-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.390907] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lock "ef0636f4-3149-44e8-a4a3-62b9ede5dc28-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.393102] env[68285]: INFO nova.compute.manager [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Terminating instance [ 819.402744] env[68285]: DEBUG nova.compute.provider_tree [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 819.432502] env[68285]: DEBUG oslo_concurrency.lockutils [req-17df6ecf-b93b-484d-b069-c79bcd0bc021 req-55b2d7f3-53ed-49f6-80df-d0520c3b0843 service nova] Releasing lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.638975] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525004ef-a58e-4788-91d1-786148fdf3b9, 'name': SearchDatastore_Task, 'duration_secs': 0.023871} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.639320] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.640016] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 819.641081] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.641081] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.641560] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 819.641560] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59e6fd6a-0f10-4c57-8685-30b5934cfb07 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.653036] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 819.653275] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 819.654021] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d04b2bfc-380d-48f7-824a-c980fb4bfefb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.660950] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 819.660950] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5206440c-acf6-7b8b-96ee-3d57d4feb927" [ 819.660950] env[68285]: _type = "Task" [ 819.660950] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.670162] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5206440c-acf6-7b8b-96ee-3d57d4feb927, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.705744] env[68285]: DEBUG nova.compute.manager [req-da8e878f-2dfd-4968-a3e4-96933c0e5552 req-d0064799-c30b-4dc3-bf40-db0434496429 service nova] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Received event network-vif-deleted-d11df654-f231-443e-aa54-91844bb26c2f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 819.707826] env[68285]: DEBUG nova.network.neutron [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Successfully updated port: 320c995b-dad7-40a2-90c1-1e0f3065e6cb {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 819.762852] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891190, 'name': ReconfigVM_Task, 'duration_secs': 0.54154} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.762852] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Reconfigured VM instance instance-00000016 to attach disk [datastore1] f0145d64-60e4-4ad5-a6ea-6c5d40780df5/f0145d64-60e4-4ad5-a6ea-6c5d40780df5.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 819.762852] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5357ded-2241-4b40-9d12-d67d6ef482e9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.769999] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 819.769999] env[68285]: value = "task-2891191" [ 819.769999] env[68285]: _type = "Task" [ 819.769999] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.779846] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891191, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.784988] env[68285]: DEBUG nova.compute.manager [req-15671579-67c7-48df-8b32-f6adb577dfdc req-3fa7ce13-3ea5-439f-9c9e-fb771ede834c service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Received event network-vif-plugged-320c995b-dad7-40a2-90c1-1e0f3065e6cb {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 819.785287] env[68285]: DEBUG oslo_concurrency.lockutils [req-15671579-67c7-48df-8b32-f6adb577dfdc req-3fa7ce13-3ea5-439f-9c9e-fb771ede834c service nova] Acquiring lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.785543] env[68285]: DEBUG oslo_concurrency.lockutils [req-15671579-67c7-48df-8b32-f6adb577dfdc req-3fa7ce13-3ea5-439f-9c9e-fb771ede834c service nova] Lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.785675] env[68285]: DEBUG oslo_concurrency.lockutils [req-15671579-67c7-48df-8b32-f6adb577dfdc req-3fa7ce13-3ea5-439f-9c9e-fb771ede834c service nova] Lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.785849] env[68285]: DEBUG nova.compute.manager [req-15671579-67c7-48df-8b32-f6adb577dfdc req-3fa7ce13-3ea5-439f-9c9e-fb771ede834c service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] No waiting events found dispatching 
network-vif-plugged-320c995b-dad7-40a2-90c1-1e0f3065e6cb {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 819.787334] env[68285]: WARNING nova.compute.manager [req-15671579-67c7-48df-8b32-f6adb577dfdc req-3fa7ce13-3ea5-439f-9c9e-fb771ede834c service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Received unexpected event network-vif-plugged-320c995b-dad7-40a2-90c1-1e0f3065e6cb for instance with vm_state building and task_state spawning. [ 819.851899] env[68285]: INFO nova.compute.manager [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Took 35.93 seconds to build instance. [ 819.911578] env[68285]: DEBUG nova.compute.manager [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 819.911761] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 819.913996] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78b39e7-7ba5-480a-af09-4130dc457fac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.929653] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 819.930254] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f3db07c-01ae-44a9-8834-123a96fc6819 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.939655] env[68285]: DEBUG oslo_vmware.api [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 819.939655] env[68285]: value = "task-2891192" [ 819.939655] env[68285]: _type = "Task" [ 819.939655] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.949279] env[68285]: DEBUG oslo_vmware.api [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891192, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.958283] env[68285]: DEBUG nova.scheduler.client.report [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 48 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 819.959218] env[68285]: DEBUG nova.compute.provider_tree [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 48 to 49 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 819.959218] env[68285]: DEBUG nova.compute.provider_tree [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 820.175027] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5206440c-acf6-7b8b-96ee-3d57d4feb927, 'name': SearchDatastore_Task, 'duration_secs': 0.008664} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.176246] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80eef2c6-d787-4836-b585-b1a6064160c4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.185177] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 820.185177] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529f6eeb-0b40-6a1f-a390-163221ff02de" [ 820.185177] env[68285]: _type = "Task" [ 820.185177] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.193427] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529f6eeb-0b40-6a1f-a390-163221ff02de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.213456] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.213929] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquired lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.215447] env[68285]: DEBUG nova.network.neutron [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 820.281749] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891191, 'name': Rename_Task, 'duration_secs': 0.14905} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.283371] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 820.283674] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82accbb6-11d9-4e35-af6b-975173de6210 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.293938] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 820.293938] env[68285]: value = "task-2891193" [ 820.293938] env[68285]: _type = "Task" [ 820.293938] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.304500] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891193, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.352118] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e4697e67-26e3-4f29-a6b4-fd305117e767 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "0d13cc84-bbf2-4e8b-8344-d69acac6bd35" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.184s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.451022] env[68285]: DEBUG oslo_vmware.api [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891192, 'name': PowerOffVM_Task, 'duration_secs': 0.200407} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.451341] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 820.451478] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 820.451790] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-648f6dd4-ff34-4158-a0a9-c15cf3497c19 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.464359] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.190s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.465443] env[68285]: DEBUG nova.compute.manager [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 820.468089] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.961s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.469444] env[68285]: INFO nova.compute.claims [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 820.667506] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 820.667734] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 820.667911] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Deleting the datastore file [datastore1] ef0636f4-3149-44e8-a4a3-62b9ede5dc28 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 820.668260] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96af664d-d313-49ac-a5df-5ba13db1c7bf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.678419] env[68285]: DEBUG oslo_vmware.api [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for the task: (returnval){ [ 820.678419] env[68285]: value = "task-2891195" [ 820.678419] env[68285]: _type = "Task" [ 820.678419] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.689310] env[68285]: DEBUG oslo_vmware.api [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891195, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.701150] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529f6eeb-0b40-6a1f-a390-163221ff02de, 'name': SearchDatastore_Task, 'duration_secs': 0.009183} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.702053] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.702053] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 5e101d74-7a82-4118-8f4c-7af9a6b0917a/5e101d74-7a82-4118-8f4c-7af9a6b0917a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 820.702053] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-823e96c7-0331-45ba-a31d-e4aa6356d47e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.712801] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 820.712801] env[68285]: value = "task-2891196" [ 820.712801] env[68285]: _type = "Task" [ 820.712801] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.726056] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891196, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.800479] env[68285]: DEBUG nova.network.neutron [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.816983] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891193, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.855387] env[68285]: DEBUG nova.compute.manager [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 820.906315] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "3e656d8d-bd06-4886-9424-4ed76b98aae9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.906617] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "3e656d8d-bd06-4886-9424-4ed76b98aae9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.974393] env[68285]: DEBUG nova.compute.utils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 820.981174] env[68285]: DEBUG nova.compute.manager [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 820.981174] env[68285]: DEBUG nova.network.neutron [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 821.121529] env[68285]: DEBUG nova.network.neutron [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Updating instance_info_cache with network_info: [{"id": "320c995b-dad7-40a2-90c1-1e0f3065e6cb", "address": "fa:16:3e:90:33:19", "network": {"id": "23f2c3a4-b609-4f2b-82ea-30d2f16df8e6", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2087738678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53261bb9432948b58692227101a4717b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap320c995b-da", "ovs_interfaceid": "320c995b-dad7-40a2-90c1-1e0f3065e6cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.143161] env[68285]: DEBUG nova.policy [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '255d34e6b0fc47c7be886b2311c74309', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74d2b141f0044c8985eae7c380a03466', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 821.192359] env[68285]: DEBUG oslo_vmware.api [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Task: {'id': task-2891195, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165952} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.192663] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 821.192854] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 821.193374] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 821.193568] env[68285]: INFO nova.compute.manager [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Took 1.28 seconds to destroy the instance on the hypervisor. [ 821.193850] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 821.194157] env[68285]: DEBUG nova.compute.manager [-] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 821.194234] env[68285]: DEBUG nova.network.neutron [-] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 821.226080] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891196, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.304462] env[68285]: DEBUG oslo_vmware.api [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891193, 'name': PowerOnVM_Task, 'duration_secs': 0.982707} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.304733] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 821.304992] env[68285]: INFO nova.compute.manager [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Took 8.56 seconds to spawn the instance on the hypervisor. [ 821.305260] env[68285]: DEBUG nova.compute.manager [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 821.306538] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79248be8-4aef-443d-ad87-953e8ee8b38c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.388275] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.479419] env[68285]: DEBUG nova.compute.manager [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 821.625690] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Releasing lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.626150] env[68285]: DEBUG nova.compute.manager [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Instance network_info: |[{"id": "320c995b-dad7-40a2-90c1-1e0f3065e6cb", "address": "fa:16:3e:90:33:19", "network": {"id": "23f2c3a4-b609-4f2b-82ea-30d2f16df8e6", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2087738678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53261bb9432948b58692227101a4717b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap320c995b-da", "ovs_interfaceid": "320c995b-dad7-40a2-90c1-1e0f3065e6cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 821.626683] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:33:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2c019b6-3ef3-4c8f-95bd-edede2c554a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '320c995b-dad7-40a2-90c1-1e0f3065e6cb', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.634817] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 821.642259] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 821.642751] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed1e3027-58b2-4348-9f2c-085410a23631 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.669675] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.669675] env[68285]: value = "task-2891197" [ 821.669675] env[68285]: _type = "Task" [ 821.669675] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.677884] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891197, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.727795] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891196, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53701} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.727795] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 5e101d74-7a82-4118-8f4c-7af9a6b0917a/5e101d74-7a82-4118-8f4c-7af9a6b0917a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 821.727795] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 821.727795] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a880b430-9312-45a0-9f69-6c7b78322aef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.733656] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 821.733656] env[68285]: value = "task-2891198" [ 821.733656] env[68285]: _type = "Task" [ 821.733656] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.750474] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891198, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.837273] env[68285]: INFO nova.compute.manager [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Took 31.22 seconds to build instance. [ 821.854056] env[68285]: DEBUG nova.network.neutron [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Successfully created port: 62912856-783f-41a2-8ea4-8d02547b1b9a {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 822.180782] env[68285]: DEBUG nova.compute.manager [req-25e36795-4f06-44e8-92fe-667712008048 req-27312a74-0f01-4723-bbed-f4166345c777 service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Received event network-changed-320c995b-dad7-40a2-90c1-1e0f3065e6cb {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 822.181076] env[68285]: DEBUG nova.compute.manager [req-25e36795-4f06-44e8-92fe-667712008048 req-27312a74-0f01-4723-bbed-f4166345c777 service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Refreshing instance network info cache due to event network-changed-320c995b-dad7-40a2-90c1-1e0f3065e6cb. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 822.182110] env[68285]: DEBUG oslo_concurrency.lockutils [req-25e36795-4f06-44e8-92fe-667712008048 req-27312a74-0f01-4723-bbed-f4166345c777 service nova] Acquiring lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.182110] env[68285]: DEBUG oslo_concurrency.lockutils [req-25e36795-4f06-44e8-92fe-667712008048 req-27312a74-0f01-4723-bbed-f4166345c777 service nova] Acquired lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.182110] env[68285]: DEBUG nova.network.neutron [req-25e36795-4f06-44e8-92fe-667712008048 req-27312a74-0f01-4723-bbed-f4166345c777 service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Refreshing network info cache for port 320c995b-dad7-40a2-90c1-1e0f3065e6cb {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 822.191408] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d9648f-f49e-4941-aac6-a11d9d646d83 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.191408] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891197, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.198410] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881bad1b-c1e7-43f0-8a6c-45a97257bc0f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.236931] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f3497a-3f2f-4dbd-9737-8880f85f1903 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.250376] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a37294-ccfe-4003-9cff-8d77927fad26 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.255403] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891198, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071346} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.255403] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 822.256030] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eef69d0-0034-4e06-8e2e-a57ead1fff63 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.270018] env[68285]: DEBUG nova.compute.provider_tree [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.284741] env[68285]: DEBUG nova.network.neutron [-] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.293976] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 5e101d74-7a82-4118-8f4c-7af9a6b0917a/5e101d74-7a82-4118-8f4c-7af9a6b0917a.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 822.295221] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ec70a5b-4bae-4a95-8da9-f5a8951675c9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.318630] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 
tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 822.318630] env[68285]: value = "task-2891199" [ 822.318630] env[68285]: _type = "Task" [ 822.318630] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.328361] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891199, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.336822] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31d9c9d3-d4cf-4709-8787-d86ac161c7cc tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "f0145d64-60e4-4ad5-a6ea-6c5d40780df5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.496s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.497705] env[68285]: DEBUG nova.compute.manager [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 822.524909] env[68285]: DEBUG nova.virt.hardware [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 822.525158] env[68285]: DEBUG nova.virt.hardware [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 822.525460] env[68285]: DEBUG nova.virt.hardware [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 822.526361] env[68285]: DEBUG nova.virt.hardware [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 822.526361] env[68285]: DEBUG nova.virt.hardware [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 822.526361] env[68285]: DEBUG nova.virt.hardware [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 822.526361] env[68285]: DEBUG nova.virt.hardware [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 822.526361] env[68285]: DEBUG nova.virt.hardware [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 822.526611] env[68285]: DEBUG nova.virt.hardware [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 822.526611] env[68285]: DEBUG nova.virt.hardware [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 822.526735] env[68285]: DEBUG nova.virt.hardware [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 822.527676] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-707f537a-d648-4ba3-85fe-f695eff99243 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.538323] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb69909-644e-4313-846d-35efc47f97ac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.687443] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891197, 'name': CreateVM_Task, 'duration_secs': 0.579281} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.687735] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 822.688611] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.688893] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.692110] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 822.692110] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b843ab51-6dc3-4fdf-85ac-73e436e5ce42 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.699977] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 822.699977] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5214c2f8-198c-e1dc-9fde-b653cf04948e" [ 822.699977] env[68285]: _type = "Task" [ 822.699977] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.712021] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5214c2f8-198c-e1dc-9fde-b653cf04948e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.775441] env[68285]: DEBUG nova.scheduler.client.report [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 822.796077] env[68285]: INFO nova.compute.manager [-] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Took 1.60 seconds to deallocate network for instance. [ 822.837029] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891199, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.841785] env[68285]: DEBUG nova.compute.manager [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 823.076430] env[68285]: DEBUG nova.network.neutron [req-25e36795-4f06-44e8-92fe-667712008048 req-27312a74-0f01-4723-bbed-f4166345c777 service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Updated VIF entry in instance network info cache for port 320c995b-dad7-40a2-90c1-1e0f3065e6cb. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 823.076430] env[68285]: DEBUG nova.network.neutron [req-25e36795-4f06-44e8-92fe-667712008048 req-27312a74-0f01-4723-bbed-f4166345c777 service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Updating instance_info_cache with network_info: [{"id": "320c995b-dad7-40a2-90c1-1e0f3065e6cb", "address": "fa:16:3e:90:33:19", "network": {"id": "23f2c3a4-b609-4f2b-82ea-30d2f16df8e6", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2087738678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53261bb9432948b58692227101a4717b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap320c995b-da", "ovs_interfaceid": "320c995b-dad7-40a2-90c1-1e0f3065e6cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.094624] env[68285]: DEBUG oslo_concurrency.lockutils [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "81fe4854-1094-4c42-9df5-05325d961146" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.094968] env[68285]: DEBUG oslo_concurrency.lockutils [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "81fe4854-1094-4c42-9df5-05325d961146" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.095209] env[68285]: DEBUG oslo_concurrency.lockutils [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "81fe4854-1094-4c42-9df5-05325d961146-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.095404] env[68285]: DEBUG oslo_concurrency.lockutils [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "81fe4854-1094-4c42-9df5-05325d961146-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.095571] env[68285]: DEBUG 
oslo_concurrency.lockutils [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "81fe4854-1094-4c42-9df5-05325d961146-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.099187] env[68285]: INFO nova.compute.manager [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Terminating instance [ 823.213648] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5214c2f8-198c-e1dc-9fde-b653cf04948e, 'name': SearchDatastore_Task, 'duration_secs': 0.033764} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.213648] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.213648] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 823.213648] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.214166] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.214166] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 823.214166] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8af88ac6-86cf-452e-9c72-b94a782ba153 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.227706] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 823.227919] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 823.228766] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91d93a1b-9af5-4116-9ab2-5e437ff25d16 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.235671] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 823.235671] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d7c056-a9f4-692d-c5bc-67233f521440" [ 823.235671] env[68285]: _type = "Task" [ 823.235671] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.245066] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d7c056-a9f4-692d-c5bc-67233f521440, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.290201] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.822s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.290892] env[68285]: DEBUG nova.compute.manager [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 823.294010] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.197s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.295610] env[68285]: INFO nova.compute.claims [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 823.303993] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.333051] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891199, 'name': ReconfigVM_Task, 'duration_secs': 0.699162} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.333394] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 5e101d74-7a82-4118-8f4c-7af9a6b0917a/5e101d74-7a82-4118-8f4c-7af9a6b0917a.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 823.334569] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aadb2849-3ea0-40f6-b079-dfce6f320990 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.344966] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 823.344966] env[68285]: value = "task-2891200" [ 823.344966] env[68285]: _type = "Task" [ 823.344966] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.367705] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891200, 'name': Rename_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.374417] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.442635] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Acquiring lock "631fe0ee-73a6-48c5-9a14-f6a00d2c2942" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.443120] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Lock "631fe0ee-73a6-48c5-9a14-f6a00d2c2942" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.565294] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f3f0ed-9684-43e0-acca-0935c83df72b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.571307] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c4a54fd7-5dd7-4858-83bd-b397f66cfc7e tempest-ServersAdminNegativeTestJSON-238518542 tempest-ServersAdminNegativeTestJSON-238518542-project-admin] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Suspending the VM {{(pid=68285) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 823.571773] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-a1ccd38d-95d0-4e2a-bab3-e3c0222aca70 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.585882] env[68285]: DEBUG oslo_concurrency.lockutils [req-25e36795-4f06-44e8-92fe-667712008048 req-27312a74-0f01-4723-bbed-f4166345c777 service nova] Releasing lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.585882] env[68285]: DEBUG nova.compute.manager [req-25e36795-4f06-44e8-92fe-667712008048 req-27312a74-0f01-4723-bbed-f4166345c777 service nova] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Received event network-vif-deleted-c1fb0925-6895-4803-ab32-896f8eb94202 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 823.585882] env[68285]: INFO nova.compute.manager [req-25e36795-4f06-44e8-92fe-667712008048 req-27312a74-0f01-4723-bbed-f4166345c777 service nova] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Neutron deleted interface c1fb0925-6895-4803-ab32-896f8eb94202; detaching it from the instance and deleting it from the info cache [ 823.585882] env[68285]: DEBUG nova.network.neutron [req-25e36795-4f06-44e8-92fe-667712008048 req-27312a74-0f01-4723-bbed-f4166345c777 service nova] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Updating 
instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.588140] env[68285]: DEBUG oslo_vmware.api [None req-c4a54fd7-5dd7-4858-83bd-b397f66cfc7e tempest-ServersAdminNegativeTestJSON-238518542 tempest-ServersAdminNegativeTestJSON-238518542-project-admin] Waiting for the task: (returnval){ [ 823.588140] env[68285]: value = "task-2891201" [ 823.588140] env[68285]: _type = "Task" [ 823.588140] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.598938] env[68285]: DEBUG oslo_vmware.api [None req-c4a54fd7-5dd7-4858-83bd-b397f66cfc7e tempest-ServersAdminNegativeTestJSON-238518542 tempest-ServersAdminNegativeTestJSON-238518542-project-admin] Task: {'id': task-2891201, 'name': SuspendVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.606344] env[68285]: DEBUG nova.compute.manager [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 823.606769] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 823.610848] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d084092-55ac-4d97-91ec-796e4bad15bf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.621206] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 823.621206] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8dd6223e-ab17-486b-a5bf-643a3481a5b8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.633536] env[68285]: DEBUG oslo_vmware.api [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 823.633536] env[68285]: value = "task-2891202" [ 823.633536] env[68285]: _type = "Task" [ 823.633536] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.643355] env[68285]: DEBUG oslo_vmware.api [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891202, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.758199] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d7c056-a9f4-692d-c5bc-67233f521440, 'name': SearchDatastore_Task, 'duration_secs': 0.031799} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.761176] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11dbcbaa-9971-4aa7-85a7-febbcd71806d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.767938] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 823.767938] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521c159a-7f2d-2fb0-d740-80e271639ad3" [ 823.767938] env[68285]: _type = "Task" [ 823.767938] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.782712] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521c159a-7f2d-2fb0-d740-80e271639ad3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.801355] env[68285]: DEBUG nova.compute.utils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 823.806488] env[68285]: DEBUG nova.compute.manager [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 823.806705] env[68285]: DEBUG nova.network.neutron [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 823.863137] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891200, 'name': Rename_Task, 'duration_secs': 0.335263} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.865741] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 823.866634] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35f85d95-4292-45ec-b4d3-0aee346c79c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.876205] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 823.876205] env[68285]: value = "task-2891203" [ 823.876205] env[68285]: _type = "Task" [ 823.876205] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.889037] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891203, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.915392] env[68285]: DEBUG nova.network.neutron [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Successfully updated port: 62912856-783f-41a2-8ea4-8d02547b1b9a {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 823.932561] env[68285]: DEBUG nova.policy [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '557a46b01bbf41e4a343d20c8206aa96', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9352aafac6e049feb8d74a91d1600224', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 824.090428] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e16bbd9f-70a6-4830-a255-a50495c42fda {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.105361] env[68285]: DEBUG oslo_vmware.api [None req-c4a54fd7-5dd7-4858-83bd-b397f66cfc7e tempest-ServersAdminNegativeTestJSON-238518542 tempest-ServersAdminNegativeTestJSON-238518542-project-admin] Task: {'id': task-2891201, 'name': SuspendVM_Task} progress is 50%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.113018] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003b1099-62b4-4e69-ac4b-4653c6129ded {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.156383] env[68285]: DEBUG nova.compute.manager [req-25e36795-4f06-44e8-92fe-667712008048 req-27312a74-0f01-4723-bbed-f4166345c777 service nova] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Detach interface failed, port_id=c1fb0925-6895-4803-ab32-896f8eb94202, reason: Instance ef0636f4-3149-44e8-a4a3-62b9ede5dc28 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 824.164176] env[68285]: DEBUG oslo_vmware.api [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891202, 'name': PowerOffVM_Task, 'duration_secs': 0.316791} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.164490] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 824.164887] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 824.165270] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-42f7780f-2ae7-423b-afc5-5185b4710b90 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.248310] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 824.248310] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 824.248310] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Deleting the datastore file [datastore2] 81fe4854-1094-4c42-9df5-05325d961146 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 824.249138] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c87cc8a-3fc0-4a11-b643-be3527ad551a {{(pid=68285) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.263789] env[68285]: DEBUG oslo_vmware.api [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 824.263789] env[68285]: value = "task-2891205" [ 824.263789] env[68285]: _type = "Task" [ 824.263789] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.279282] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521c159a-7f2d-2fb0-d740-80e271639ad3, 'name': SearchDatastore_Task, 'duration_secs': 0.025974} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.282938] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.282938] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 8bedba57-e7c8-4fa8-b171-f6d74550a31c/8bedba57-e7c8-4fa8-b171-f6d74550a31c.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 824.283220] env[68285]: DEBUG oslo_vmware.api [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891205, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.283320] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55e26bf9-4c15-4e3d-b8f2-2954a15fe672 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.294713] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 824.294713] env[68285]: value = "task-2891206" [ 824.294713] env[68285]: _type = "Task" [ 824.294713] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.303671] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891206, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.307427] env[68285]: DEBUG nova.compute.manager [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 824.349410] env[68285]: DEBUG nova.compute.manager [req-9f63eee8-70d7-4aaa-992a-a9ae09c50970 req-3cf01262-e68f-465d-ba55-da9708eca115 service nova] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Received event network-vif-plugged-62912856-783f-41a2-8ea4-8d02547b1b9a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 824.349760] env[68285]: DEBUG oslo_concurrency.lockutils [req-9f63eee8-70d7-4aaa-992a-a9ae09c50970 req-3cf01262-e68f-465d-ba55-da9708eca115 service nova] Acquiring lock "95f5e902-6385-4602-8458-7d7b2069a9da-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.350154] env[68285]: DEBUG oslo_concurrency.lockutils [req-9f63eee8-70d7-4aaa-992a-a9ae09c50970 req-3cf01262-e68f-465d-ba55-da9708eca115 service nova] Lock "95f5e902-6385-4602-8458-7d7b2069a9da-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.350388] env[68285]: DEBUG oslo_concurrency.lockutils [req-9f63eee8-70d7-4aaa-992a-a9ae09c50970 req-3cf01262-e68f-465d-ba55-da9708eca115 service nova] Lock "95f5e902-6385-4602-8458-7d7b2069a9da-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.350718] env[68285]: DEBUG nova.compute.manager [req-9f63eee8-70d7-4aaa-992a-a9ae09c50970 req-3cf01262-e68f-465d-ba55-da9708eca115 service nova] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] No waiting events found dispatching network-vif-plugged-62912856-783f-41a2-8ea4-8d02547b1b9a {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 824.350947] env[68285]: WARNING nova.compute.manager [req-9f63eee8-70d7-4aaa-992a-a9ae09c50970 req-3cf01262-e68f-465d-ba55-da9708eca115 service nova] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Received unexpected event network-vif-plugged-62912856-783f-41a2-8ea4-8d02547b1b9a for instance with vm_state building and task_state spawning. [ 824.351217] env[68285]: DEBUG nova.compute.manager [req-9f63eee8-70d7-4aaa-992a-a9ae09c50970 req-3cf01262-e68f-465d-ba55-da9708eca115 service nova] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Received event network-changed-62912856-783f-41a2-8ea4-8d02547b1b9a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 824.351356] env[68285]: DEBUG nova.compute.manager [req-9f63eee8-70d7-4aaa-992a-a9ae09c50970 req-3cf01262-e68f-465d-ba55-da9708eca115 service nova] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Refreshing instance network info cache due to event network-changed-62912856-783f-41a2-8ea4-8d02547b1b9a. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 824.351816] env[68285]: DEBUG oslo_concurrency.lockutils [req-9f63eee8-70d7-4aaa-992a-a9ae09c50970 req-3cf01262-e68f-465d-ba55-da9708eca115 service nova] Acquiring lock "refresh_cache-95f5e902-6385-4602-8458-7d7b2069a9da" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.351918] env[68285]: DEBUG oslo_concurrency.lockutils [req-9f63eee8-70d7-4aaa-992a-a9ae09c50970 req-3cf01262-e68f-465d-ba55-da9708eca115 service nova] Acquired lock "refresh_cache-95f5e902-6385-4602-8458-7d7b2069a9da" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.352130] env[68285]: DEBUG nova.network.neutron [req-9f63eee8-70d7-4aaa-992a-a9ae09c50970 req-3cf01262-e68f-465d-ba55-da9708eca115 service nova] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Refreshing network info cache for port 62912856-783f-41a2-8ea4-8d02547b1b9a {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 824.388424] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891203, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.419350] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquiring lock "refresh_cache-95f5e902-6385-4602-8458-7d7b2069a9da" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.487972] env[68285]: DEBUG nova.network.neutron [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Successfully created port: 13f01d57-9418-46a8-90cb-0fa78c30305f {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 824.607419] env[68285]: DEBUG oslo_vmware.api [None req-c4a54fd7-5dd7-4858-83bd-b397f66cfc7e tempest-ServersAdminNegativeTestJSON-238518542 tempest-ServersAdminNegativeTestJSON-238518542-project-admin] Task: {'id': task-2891201, 'name': SuspendVM_Task, 'duration_secs': 0.845347} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.607581] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c4a54fd7-5dd7-4858-83bd-b397f66cfc7e tempest-ServersAdminNegativeTestJSON-238518542 tempest-ServersAdminNegativeTestJSON-238518542-project-admin] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Suspended the VM {{(pid=68285) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 824.607818] env[68285]: DEBUG nova.compute.manager [None req-c4a54fd7-5dd7-4858-83bd-b397f66cfc7e tempest-ServersAdminNegativeTestJSON-238518542 tempest-ServersAdminNegativeTestJSON-238518542-project-admin] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 824.608691] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f960630e-7efd-443a-ac7d-8d32cd7e0157 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.776814] env[68285]: DEBUG oslo_vmware.api [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891205, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.39914} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.782295] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 824.782295] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 824.782295] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 824.782295] env[68285]: INFO nova.compute.manager [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Took 1.18 seconds to destroy the instance on the hypervisor. [ 824.782544] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 824.784079] env[68285]: DEBUG nova.compute.manager [-] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 824.784079] env[68285]: DEBUG nova.network.neutron [-] [instance: 81fe4854-1094-4c42-9df5-05325d961146] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 824.816717] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891206, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.894673] env[68285]: DEBUG oslo_vmware.api [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891203, 'name': PowerOnVM_Task, 'duration_secs': 0.621331} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.895016] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 824.895221] env[68285]: INFO nova.compute.manager [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Took 9.31 seconds to spawn the instance on the hypervisor. [ 824.895405] env[68285]: DEBUG nova.compute.manager [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 824.899025] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db1e95b-52eb-4e20-a5d0-fdae9b3a1cb7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.915989] env[68285]: DEBUG nova.network.neutron [req-9f63eee8-70d7-4aaa-992a-a9ae09c50970 req-3cf01262-e68f-465d-ba55-da9708eca115 service nova] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 825.027564] env[68285]: DEBUG nova.network.neutron [req-9f63eee8-70d7-4aaa-992a-a9ae09c50970 req-3cf01262-e68f-465d-ba55-da9708eca115 service nova] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.096401] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87ce916-ebf2-4614-86c5-dfae5a8c48c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.108020] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1623b9bb-e9a0-4647-abb3-c8bef6336bf6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.147523] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d666cf49-e371-4e87-82b7-c0e7553426c9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.160038] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9cce96d-5dbc-4069-b065-651e9f32730e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.187271] env[68285]: DEBUG nova.compute.provider_tree [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.306149] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891206, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.823662} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.306412] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 8bedba57-e7c8-4fa8-b171-f6d74550a31c/8bedba57-e7c8-4fa8-b171-f6d74550a31c.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 825.306527] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 825.306778] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c876578e-e99a-49cd-9e08-2f23ae199062 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.314221] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 825.314221] env[68285]: value = "task-2891207" [ 825.314221] env[68285]: _type = "Task" [ 825.314221] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.322779] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891207, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.328232] env[68285]: DEBUG nova.compute.manager [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 825.355081] env[68285]: DEBUG nova.virt.hardware [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 825.355081] env[68285]: DEBUG nova.virt.hardware [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.355081] env[68285]: DEBUG nova.virt.hardware [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 825.355240] env[68285]: DEBUG nova.virt.hardware [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.355340] env[68285]: DEBUG nova.virt.hardware [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 825.355482] env[68285]: DEBUG nova.virt.hardware [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 825.355698] env[68285]: DEBUG nova.virt.hardware [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 825.355853] env[68285]: DEBUG nova.virt.hardware [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 825.356039] env[68285]: DEBUG nova.virt.hardware [None 
req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 825.356205] env[68285]: DEBUG nova.virt.hardware [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 825.356389] env[68285]: DEBUG nova.virt.hardware [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 825.357320] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384631df-5faa-4145-82a9-3aa3b88d3384 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.365981] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ce2781-1aec-4f8f-8cf1-0450d444d128 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.419385] env[68285]: INFO nova.compute.manager [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Took 34.45 seconds to build instance. 
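The entries above repeat the same vCenter task life cycle: a task-producing method such as PowerOnVM_Task is invoked through oslo_vmware.service, wait_for_task logs the returned task reference, _poll_task reports "progress is N%", and the final poll records duration_secs on success. The following is a minimal, illustrative sketch of that invoke-then-poll pattern using oslo.vmware's public session API; the host, credentials, and vm_ref are placeholders rather than values taken from this log, and exact constructor arguments may differ between oslo.vmware releases.

# Minimal sketch of the invoke-then-poll pattern seen in the log above.
# Assumptions: placeholder vCenter host/credentials, and a vm_ref already
# obtained elsewhere (e.g. via a SearchIndex or PropertyCollector lookup).
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.org', 'svc-user', 'secret',      # placeholders, not from this log
    api_retry_count=10, task_poll_interval=0.5)  # poll interval drives the
                                                 # "progress is N%" entries

def power_on(vm_ref):
    # Start the vCenter task ("Invoking VirtualMachine.PowerOnVM_Task ...").
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Block until the task finishes; oslo.vmware polls it periodically and
    # raises if vCenter reports an error, mirroring the wait_for_task /
    # _poll_task entries in the log.
    return session.wait_for_task(task)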
[ 825.533122] env[68285]: DEBUG oslo_concurrency.lockutils [req-9f63eee8-70d7-4aaa-992a-a9ae09c50970 req-3cf01262-e68f-465d-ba55-da9708eca115 service nova] Releasing lock "refresh_cache-95f5e902-6385-4602-8458-7d7b2069a9da" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.533122] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquired lock "refresh_cache-95f5e902-6385-4602-8458-7d7b2069a9da" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 825.533122] env[68285]: DEBUG nova.network.neutron [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 825.691762] env[68285]: DEBUG nova.scheduler.client.report [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 825.733636] env[68285]: DEBUG nova.network.neutron [-] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.493564] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea0afa33-74e7-4ea5-ac99-e8d6e91b7c54 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.460s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.496078] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.202s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.496550] env[68285]: DEBUG nova.compute.manager [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 826.499068] env[68285]: INFO nova.compute.manager [-] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Took 1.72 seconds to deallocate network for instance. [ 826.503843] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.927s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.505271] env[68285]: INFO nova.compute.claims [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.518478] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891207, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070137} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.518736] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 826.519589] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa260d1-ccc9-4e45-b485-7d681c5e48d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.543624] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Reconfiguring VM instance instance-00000018 to attach disk [datastore2] 8bedba57-e7c8-4fa8-b171-f6d74550a31c/8bedba57-e7c8-4fa8-b171-f6d74550a31c.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 826.544338] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-203051bf-9b1e-49dd-af76-1fe25c15d548 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.568278] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 826.568278] env[68285]: value = "task-2891208" [ 826.568278] env[68285]: _type = "Task" [ 826.568278] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.575899] env[68285]: DEBUG nova.network.neutron [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 826.585101] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891208, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.698569] env[68285]: DEBUG nova.compute.manager [req-5904a744-d4c8-4739-9f63-2ace47d9ee5b req-305ce3a8-8151-4229-b1c0-84bd5798b58d service nova] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Received event network-vif-deleted-60f03e16-4a3a-44b1-b442-db8e844f18a3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 826.737265] env[68285]: DEBUG nova.network.neutron [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Successfully updated port: 13f01d57-9418-46a8-90cb-0fa78c30305f {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 826.857024] env[68285]: DEBUG nova.network.neutron [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Updating instance_info_cache with network_info: [{"id": "62912856-783f-41a2-8ea4-8d02547b1b9a", "address": "fa:16:3e:85:3a:3f", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62912856-78", "ovs_interfaceid": "62912856-783f-41a2-8ea4-8d02547b1b9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.003374] env[68285]: DEBUG nova.compute.utils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 827.005576] env[68285]: DEBUG nova.compute.manager [None 
req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 827.005576] env[68285]: DEBUG nova.network.neutron [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 827.009979] env[68285]: DEBUG nova.compute.manager [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 827.016836] env[68285]: DEBUG oslo_concurrency.lockutils [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.059731] env[68285]: DEBUG nova.policy [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4400d8f8e7a445c8f04d56cf7d0aef8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76ac2f0f1f5844b6a8682c10e5a75003', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 827.079982] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891208, 'name': ReconfigVM_Task, 'duration_secs': 0.302735} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.080798] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Reconfigured VM instance instance-00000018 to attach disk [datastore2] 8bedba57-e7c8-4fa8-b171-f6d74550a31c/8bedba57-e7c8-4fa8-b171-f6d74550a31c.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 827.081442] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce7df5a1-ec59-455f-ae40-840e2c9914e4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.091757] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 827.091757] env[68285]: value = "task-2891209" [ 827.091757] env[68285]: _type = "Task" [ 827.091757] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.101800] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891209, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.244044] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "refresh_cache-7dca07f4-78aa-45e4-954a-c9f4d58e7c84" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.244044] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "refresh_cache-7dca07f4-78aa-45e4-954a-c9f4d58e7c84" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.244044] env[68285]: DEBUG nova.network.neutron [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 827.356766] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Releasing lock "refresh_cache-95f5e902-6385-4602-8458-7d7b2069a9da" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.357165] env[68285]: DEBUG nova.compute.manager [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Instance network_info: |[{"id": 
"62912856-783f-41a2-8ea4-8d02547b1b9a", "address": "fa:16:3e:85:3a:3f", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62912856-78", "ovs_interfaceid": "62912856-783f-41a2-8ea4-8d02547b1b9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 827.357603] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:3a:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62912856-783f-41a2-8ea4-8d02547b1b9a', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 827.367873] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 827.368158] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 827.368396] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f0c2112-dcc5-43df-8303-74c3bf8c68df {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.393528] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 827.393528] env[68285]: value = "task-2891210" [ 827.393528] env[68285]: _type = "Task" [ 827.393528] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.412530] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891210, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.487621] env[68285]: DEBUG nova.network.neutron [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Successfully created port: 24fc61fe-8e63-4459-8435-25d4c23e10f2 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 827.508748] env[68285]: DEBUG nova.compute.manager [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 827.545951] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.606721] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891209, 'name': Rename_Task, 'duration_secs': 0.177925} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.606721] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 827.606937] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fcb77356-c37a-46bc-b55c-2f74f8452acf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.615322] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 827.615322] env[68285]: value = "task-2891211" [ 827.615322] env[68285]: _type = "Task" [ 827.615322] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.630120] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891211, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.823933] env[68285]: DEBUG nova.network.neutron [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.907288] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891210, 'name': CreateVM_Task, 'duration_secs': 0.377725} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.908471] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 827.911406] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.911634] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.911950] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 827.912301] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4739c45b-f125-4bdd-b726-279874b12ce3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.920438] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 827.920438] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5269f338-3aea-6c5c-d7e8-246a9ecfe5b4" [ 827.920438] env[68285]: _type = "Task" [ 827.920438] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.939362] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5269f338-3aea-6c5c-d7e8-246a9ecfe5b4, 'name': SearchDatastore_Task, 'duration_secs': 0.014791} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.939669] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.939922] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 827.940146] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.940292] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.940471] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 827.940739] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea65b63d-078d-4b75-8320-26cc74176987 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.951071] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 827.951283] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 827.952195] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19ac15ab-50c7-4ebe-b91f-0c4b2b7cf54a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.958505] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 827.958505] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ab5f05-fd7f-5bb5-9202-ae5ccbafd3da" [ 827.958505] env[68285]: _type = "Task" [ 827.958505] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.971782] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ab5f05-fd7f-5bb5-9202-ae5ccbafd3da, 'name': SearchDatastore_Task, 'duration_secs': 0.010752} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.972740] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8f7eeba-059d-468e-b2cd-f1de05e4ea41 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.979467] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 827.979467] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b67223-42ec-624b-6443-25b23714560f" [ 827.979467] env[68285]: _type = "Task" [ 827.979467] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.983381] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "f0145d64-60e4-4ad5-a6ea-6c5d40780df5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.983640] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "f0145d64-60e4-4ad5-a6ea-6c5d40780df5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.983911] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "f0145d64-60e4-4ad5-a6ea-6c5d40780df5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.984153] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "f0145d64-60e4-4ad5-a6ea-6c5d40780df5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.984366] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "f0145d64-60e4-4ad5-a6ea-6c5d40780df5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.986849] env[68285]: INFO nova.compute.manager [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Terminating instance [ 827.992902] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b67223-42ec-624b-6443-25b23714560f, 'name': SearchDatastore_Task, 'duration_secs': 0.011214} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.996438] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.996717] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 95f5e902-6385-4602-8458-7d7b2069a9da/95f5e902-6385-4602-8458-7d7b2069a9da.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 827.999227] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-222436e8-3b12-4995-929d-a52a49d7cf02 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.008834] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 828.008834] env[68285]: value = "task-2891212" [ 828.008834] env[68285]: _type = "Task" [ 828.008834] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.025058] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891212, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.128365] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891211, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.133450] env[68285]: DEBUG nova.network.neutron [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Updating instance_info_cache with network_info: [{"id": "13f01d57-9418-46a8-90cb-0fa78c30305f", "address": "fa:16:3e:ac:2f:0d", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13f01d57-94", "ovs_interfaceid": "13f01d57-9418-46a8-90cb-0fa78c30305f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.180633] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9332757-7a95-441c-9075-10413bd8ec7b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.191447] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fdf1467-6091-4f3c-8c60-1fd33f9c6b4d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.228167] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a8a4c8-312b-48b7-841d-bfa815f87b1b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.238801] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07aa8a0-d3bc-4275-a4f7-a4463f673c43 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.260505] env[68285]: DEBUG nova.compute.provider_tree [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.497406] env[68285]: DEBUG nova.compute.manager [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 828.498487] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 828.499166] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd1d8c0-c0e0-4bbd-bd34-87d2ab6126c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.518976] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 828.519306] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50f3ee7d-fcf8-4d55-8f62-6fe56e15cc5f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.529085] env[68285]: DEBUG nova.compute.manager [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 828.530869] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891212, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.562564] env[68285]: DEBUG nova.virt.hardware [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 828.563838] env[68285]: DEBUG nova.virt.hardware [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 828.564171] env[68285]: DEBUG nova.virt.hardware [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 828.564387] env[68285]: DEBUG nova.virt.hardware [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 828.564556] env[68285]: DEBUG nova.virt.hardware [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 828.564850] env[68285]: DEBUG nova.virt.hardware [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 828.565677] env[68285]: DEBUG nova.virt.hardware [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 828.565677] env[68285]: DEBUG nova.virt.hardware [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 828.565677] env[68285]: DEBUG nova.virt.hardware [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 828.565677] env[68285]: DEBUG nova.virt.hardware [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 828.565859] env[68285]: DEBUG nova.virt.hardware [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 828.566755] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f62675e-0436-450c-8c93-4fad2151bcdb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.575740] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e91105-4295-4d90-a312-5cac733555a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.616962] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 828.617095] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 828.617285] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Deleting the datastore file [datastore1] f0145d64-60e4-4ad5-a6ea-6c5d40780df5 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 828.617549] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c5dc146-3391-41d3-8331-55d76cb67154 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.628439] env[68285]: DEBUG oslo_vmware.api [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891211, 'name': PowerOnVM_Task, 'duration_secs': 0.814479} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.630334] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 828.630560] env[68285]: INFO nova.compute.manager [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Took 10.31 seconds to spawn the instance on the hypervisor. [ 828.630739] env[68285]: DEBUG nova.compute.manager [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 828.631145] env[68285]: DEBUG oslo_vmware.api [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 828.631145] env[68285]: value = "task-2891214" [ 828.631145] env[68285]: _type = "Task" [ 828.631145] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.631798] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d491d0-6d6d-4f99-b910-7f74efc31fd3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.637770] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "refresh_cache-7dca07f4-78aa-45e4-954a-c9f4d58e7c84" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.638099] env[68285]: DEBUG nova.compute.manager [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Instance network_info: |[{"id": "13f01d57-9418-46a8-90cb-0fa78c30305f", "address": "fa:16:3e:ac:2f:0d", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13f01d57-94", "ovs_interfaceid": 
"13f01d57-9418-46a8-90cb-0fa78c30305f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 828.638482] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:2f:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13f01d57-9418-46a8-90cb-0fa78c30305f', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 828.645964] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Creating folder: Project (9352aafac6e049feb8d74a91d1600224). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 828.648175] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a130d5a5-bd93-475b-8054-e3330e08b1f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.655691] env[68285]: DEBUG oslo_vmware.api [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891214, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.666310] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Created folder: Project (9352aafac6e049feb8d74a91d1600224) in parent group-v580775. [ 828.666545] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Creating folder: Instances. Parent ref: group-v580843. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 828.666820] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76d028ed-3ecd-4279-a174-4e240feb5713 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.668729] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 828.669037] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 828.680819] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Created folder: Instances in parent group-v580843. [ 828.681090] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 828.681387] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 828.681506] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae000e82-81a0-44d1-b129-18e9e85b2b25 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.704432] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 828.704432] env[68285]: value = "task-2891217" [ 828.704432] env[68285]: _type = "Task" [ 828.704432] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.715893] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891217, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.766642] env[68285]: DEBUG nova.scheduler.client.report [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 828.798112] env[68285]: DEBUG nova.compute.manager [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Received event network-vif-plugged-13f01d57-9418-46a8-90cb-0fa78c30305f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 828.798360] env[68285]: DEBUG oslo_concurrency.lockutils [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] Acquiring lock "7dca07f4-78aa-45e4-954a-c9f4d58e7c84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.798588] env[68285]: DEBUG oslo_concurrency.lockutils [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] Lock "7dca07f4-78aa-45e4-954a-c9f4d58e7c84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.798764] env[68285]: DEBUG oslo_concurrency.lockutils [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] Lock "7dca07f4-78aa-45e4-954a-c9f4d58e7c84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.798949] env[68285]: DEBUG nova.compute.manager [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] No waiting events found dispatching network-vif-plugged-13f01d57-9418-46a8-90cb-0fa78c30305f {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 828.799138] env[68285]: WARNING nova.compute.manager [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Received unexpected event network-vif-plugged-13f01d57-9418-46a8-90cb-0fa78c30305f for instance with vm_state building and task_state spawning. 
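The repeating pattern above — "Invoking <ManagedObject>.<Method>", "Waiting for the task: (returnval){ ... }", "Task: {...} progress is N%", "completed successfully" — is oslo_vmware's request/poll cycle. Below is a minimal, self-contained sketch of that cycle; it is not Nova's code, and the vCenter host, credentials, and moref value are placeholders.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder endpoint and credentials; in Nova these come from the
# [vmware] host_ip/host_username/host_password options in nova.conf.
session = vmware_api.VMwareAPISession(
    'vc1.example.test', 'administrator@vsphere.local', 'secret',
    api_retry_count=10,        # retries on transient SOAP faults
    task_poll_interval=0.5)    # seconds between the "progress is N%" polls

# Managed-object reference for an existing VM (the value is made up).
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

# invoke_api() issues the SOAP call (the "Invoking VirtualMachine.PowerOnVM_Task"
# records); wait_for_task() then polls the returned task object until it reaches
# 'success' or raises on 'error', producing the progress/completion records.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)

The same cycle underlies every task visible in this trace (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, DeleteDatastoreFile_Task, PowerOnVM_Task).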
[ 828.799333] env[68285]: DEBUG nova.compute.manager [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Received event network-changed-462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 828.799493] env[68285]: DEBUG nova.compute.manager [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Refreshing instance network info cache due to event network-changed-462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 828.799728] env[68285]: DEBUG oslo_concurrency.lockutils [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] Acquiring lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.799831] env[68285]: DEBUG oslo_concurrency.lockutils [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] Acquired lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.800049] env[68285]: DEBUG nova.network.neutron [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Refreshing network info cache for port 462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 828.869284] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Acquiring lock "940e0328-970d-4f49-a102-d8a00b8c299b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.870012] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Lock "940e0328-970d-4f49-a102-d8a00b8c299b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.023172] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891212, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582818} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.023630] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 95f5e902-6385-4602-8458-7d7b2069a9da/95f5e902-6385-4602-8458-7d7b2069a9da.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 829.023918] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 829.024343] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c09b07fd-bfde-4592-99a4-e4bd0afdbc39 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.033582] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 829.033582] env[68285]: value = "task-2891218" [ 829.033582] env[68285]: _type = "Task" [ 829.033582] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.043812] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891218, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.145244] env[68285]: DEBUG oslo_vmware.api [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891214, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181736} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.145572] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 829.145667] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 829.145840] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 829.146073] env[68285]: INFO nova.compute.manager [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Took 0.65 seconds to destroy the instance on the hypervisor. [ 829.146330] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 829.146515] env[68285]: DEBUG nova.compute.manager [-] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 829.146947] env[68285]: DEBUG nova.network.neutron [-] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 829.166322] env[68285]: INFO nova.compute.manager [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Took 36.26 seconds to build instance. 
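The lock lines throughout this trace ('Acquiring lock "X" by "Y"', 'Lock "X" acquired ... waited N.NNNs', '"released" ... held N.NNNs') come from oslo.concurrency's lockutils, which Nova uses for things like the per-instance build lock and the resource tracker's "compute_resources" lock. A minimal sketch of the two forms seen in the records, illustrative only — the function names are placeholders, not Nova's:

from oslo_concurrency import lockutils

def claim_resources():
    """Placeholder for work done while the lock is held."""
    pass

# Context-manager form: produces the 'Acquiring lock'/'Acquired lock'/
# 'Releasing lock' records (logged from lockutils' lock()).
with lockutils.lock('compute_resources'):
    claim_resources()

# Decorator form: its wrapper logs the 'Lock "..." acquired by "..." ::
# waited N.NNNs' and '"released" ... held N.NNNs' records (logged from inner()),
# the shape behind entries like ResourceTracker.instance_claim above.
@lockutils.synchronized('compute_resources')
def instance_claim():
    claim_resources()

instance_claim()

The "waited" and "held" durations report how long the caller blocked before acquiring the semaphore and how long it ran while holding it.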
[ 829.182855] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 829.183163] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 829.183403] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 829.183857] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 829.184105] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 829.184325] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 829.184463] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68285) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 829.184632] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 829.216029] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891217, 'name': CreateVM_Task, 'duration_secs': 0.386277} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.216273] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 829.217146] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.217319] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.217621] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 829.218157] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8ed1ec1-2f13-448e-97b6-8679248964c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.223838] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 829.223838] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523b8957-dbf5-9bdb-78ac-143b29e9fdf4" [ 829.223838] env[68285]: _type = "Task" [ 829.223838] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.234358] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523b8957-dbf5-9bdb-78ac-143b29e9fdf4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.275202] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.771s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.275734] env[68285]: DEBUG nova.compute.manager [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 829.278341] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.300s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.279701] env[68285]: INFO nova.compute.claims [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.551240] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891218, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081133} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.551619] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 829.552509] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b950561-ccb8-485e-b272-4d08e1fec00a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.577413] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] 95f5e902-6385-4602-8458-7d7b2069a9da/95f5e902-6385-4602-8458-7d7b2069a9da.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 829.577716] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01d9fff6-7be9-443f-9a27-d47951d64c08 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.602522] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 829.602522] env[68285]: value = "task-2891219" [ 829.602522] env[68285]: _type = "Task" [ 829.602522] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.611449] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891219, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.670444] env[68285]: DEBUG nova.network.neutron [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Successfully updated port: 24fc61fe-8e63-4459-8435-25d4c23e10f2 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 829.671800] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b7c1691-9794-47b1-88a4-49070aab5fac tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.330s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.687983] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.741237] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523b8957-dbf5-9bdb-78ac-143b29e9fdf4, 'name': SearchDatastore_Task, 'duration_secs': 0.010655} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.741577] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.741851] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 829.742947] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.743183] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.743364] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 829.743663] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14e900a2-80e1-4f78-8e0f-990f5f434abc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.757024] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 829.757024] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 829.757024] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1180b057-7b13-4438-8602-9b1e7ee6c044 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.767967] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 829.767967] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]526ae0e3-9eec-e9e6-4512-889c3ec430e4" [ 829.767967] env[68285]: _type = "Task" [ 829.767967] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.773563] env[68285]: DEBUG nova.network.neutron [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Updated VIF entry in instance network info cache for port 462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 829.773955] env[68285]: DEBUG nova.network.neutron [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Updating instance_info_cache with network_info: [{"id": "462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1", "address": "fa:16:3e:c7:6a:21", "network": {"id": "43282131-363f-42f6-b208-74cfe0d8a7c2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-166704782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fd7bc7649b647939584cc01c1f3b5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap462b7f0c-cb", "ovs_interfaceid": "462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.783031] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]526ae0e3-9eec-e9e6-4512-889c3ec430e4, 'name': SearchDatastore_Task, 'duration_secs': 0.011711} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.787083] env[68285]: DEBUG nova.compute.utils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 829.788302] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bda2044-d251-4825-a83a-1ac8950b3006 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.792359] env[68285]: DEBUG nova.compute.manager [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 829.796150] env[68285]: DEBUG nova.network.neutron [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 829.804041] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 829.804041] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52655635-9607-10d6-8f32-f4f4a8fbe88c" [ 829.804041] env[68285]: _type = "Task" [ 829.804041] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.833015] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52655635-9607-10d6-8f32-f4f4a8fbe88c, 'name': SearchDatastore_Task, 'duration_secs': 0.026007} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.833355] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.833630] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 7dca07f4-78aa-45e4-954a-c9f4d58e7c84/7dca07f4-78aa-45e4-954a-c9f4d58e7c84.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 829.833911] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-44894ef9-31ef-46d4-9b9c-86434482ba22 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.847053] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 829.847053] env[68285]: value = "task-2891220" [ 829.847053] env[68285]: _type = "Task" [ 829.847053] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.856239] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891220, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.861466] env[68285]: DEBUG nova.policy [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ddeabae0a62e427680378d950ab09a95', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '055ac9b62b874648b8bde2fc7d4f5386', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 830.116347] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891219, 'name': ReconfigVM_Task, 'duration_secs': 0.318068} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.116686] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Reconfigured VM instance instance-00000019 to attach disk [datastore2] 95f5e902-6385-4602-8458-7d7b2069a9da/95f5e902-6385-4602-8458-7d7b2069a9da.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 830.117772] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4b8af82-0a97-4cd3-a257-4449e42d6091 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.129912] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 830.129912] env[68285]: value = "task-2891221" [ 830.129912] env[68285]: _type = "Task" [ 830.129912] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.151539] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891221, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.177651] env[68285]: DEBUG nova.compute.manager [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 830.182903] env[68285]: DEBUG nova.network.neutron [-] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.187872] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Acquiring lock "refresh_cache-c8784827-a928-439d-abdf-d82b62a61152" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.189657] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Acquired lock "refresh_cache-c8784827-a928-439d-abdf-d82b62a61152" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.189932] env[68285]: DEBUG nova.network.neutron [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.221236] env[68285]: DEBUG nova.network.neutron [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Successfully created port: bc6618d2-82b3-4803-802e-d16377e5423f {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.278451] env[68285]: DEBUG oslo_concurrency.lockutils [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] Releasing lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.278731] env[68285]: DEBUG nova.compute.manager [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Received event network-changed-13f01d57-9418-46a8-90cb-0fa78c30305f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 830.280815] env[68285]: DEBUG nova.compute.manager [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Refreshing instance network info cache due to event network-changed-13f01d57-9418-46a8-90cb-0fa78c30305f. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 830.281250] env[68285]: DEBUG oslo_concurrency.lockutils [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] Acquiring lock "refresh_cache-7dca07f4-78aa-45e4-954a-c9f4d58e7c84" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.281490] env[68285]: DEBUG oslo_concurrency.lockutils [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] Acquired lock "refresh_cache-7dca07f4-78aa-45e4-954a-c9f4d58e7c84" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.281762] env[68285]: DEBUG nova.network.neutron [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Refreshing network info cache for port 13f01d57-9418-46a8-90cb-0fa78c30305f {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 830.294509] env[68285]: DEBUG nova.compute.manager [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 830.364958] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891220, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.639851] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891221, 'name': Rename_Task, 'duration_secs': 0.315066} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.640290] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 830.640581] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04a6d6b2-45df-4a2b-a9b7-a85c24e2910c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.647314] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 830.647314] env[68285]: value = "task-2891222" [ 830.647314] env[68285]: _type = "Task" [ 830.647314] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.658346] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891222, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.695563] env[68285]: INFO nova.compute.manager [-] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Took 1.55 seconds to deallocate network for instance. [ 830.713608] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.758607] env[68285]: DEBUG nova.network.neutron [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.857636] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891220, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.859285] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32fac23-c740-4599-9b41-43f425649f7d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.869011] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64928ece-962a-4aa9-8197-491cd8276649 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.902345] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f911c0-893a-49f9-8cda-2853c11431c6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.911529] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89396aa7-431f-46bd-a3c6-0d9e55515832 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.926525] env[68285]: DEBUG nova.compute.provider_tree [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.154460] env[68285]: DEBUG nova.network.neutron [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Updating 
instance_info_cache with network_info: [{"id": "24fc61fe-8e63-4459-8435-25d4c23e10f2", "address": "fa:16:3e:5b:7b:b3", "network": {"id": "1e4937a9-cd6f-4fdc-9eb2-27e2750ba9c5", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1810722646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76ac2f0f1f5844b6a8682c10e5a75003", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24fc61fe-8e", "ovs_interfaceid": "24fc61fe-8e63-4459-8435-25d4c23e10f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.163334] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891222, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.206818] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.309689] env[68285]: DEBUG nova.compute.manager [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 831.320963] env[68285]: DEBUG nova.network.neutron [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Updated VIF entry in instance network info cache for port 13f01d57-9418-46a8-90cb-0fa78c30305f. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 831.321286] env[68285]: DEBUG nova.network.neutron [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Updating instance_info_cache with network_info: [{"id": "13f01d57-9418-46a8-90cb-0fa78c30305f", "address": "fa:16:3e:ac:2f:0d", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13f01d57-94", "ovs_interfaceid": "13f01d57-9418-46a8-90cb-0fa78c30305f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.342900] env[68285]: DEBUG nova.virt.hardware [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 831.343188] env[68285]: DEBUG nova.virt.hardware [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.343349] env[68285]: DEBUG nova.virt.hardware [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 831.343533] env[68285]: DEBUG nova.virt.hardware [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 
tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.343671] env[68285]: DEBUG nova.virt.hardware [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 831.343827] env[68285]: DEBUG nova.virt.hardware [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 831.344062] env[68285]: DEBUG nova.virt.hardware [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 831.344663] env[68285]: DEBUG nova.virt.hardware [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 831.344663] env[68285]: DEBUG nova.virt.hardware [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 831.344663] env[68285]: DEBUG nova.virt.hardware [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 831.344989] env[68285]: DEBUG nova.virt.hardware [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 831.345896] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174560c9-aea6-4036-b039-b234ed96d8fe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.360475] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2adfdef0-60f9-42a4-ba84-1b2a54b5981f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.364266] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891220, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.429927] env[68285]: DEBUG nova.scheduler.client.report [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 831.659910] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Releasing lock "refresh_cache-c8784827-a928-439d-abdf-d82b62a61152" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.660231] env[68285]: DEBUG nova.compute.manager [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Instance network_info: |[{"id": "24fc61fe-8e63-4459-8435-25d4c23e10f2", "address": "fa:16:3e:5b:7b:b3", "network": {"id": "1e4937a9-cd6f-4fdc-9eb2-27e2750ba9c5", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1810722646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76ac2f0f1f5844b6a8682c10e5a75003", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24fc61fe-8e", "ovs_interfaceid": "24fc61fe-8e63-4459-8435-25d4c23e10f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 831.660704] env[68285]: DEBUG oslo_vmware.api [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891222, 'name': PowerOnVM_Task, 'duration_secs': 0.547701} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.661086] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:7b:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7908211b-df93-467b-87a8-3c3d29b03de6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24fc61fe-8e63-4459-8435-25d4c23e10f2', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.669540] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Creating folder: Project (76ac2f0f1f5844b6a8682c10e5a75003). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.669851] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 831.670113] env[68285]: INFO nova.compute.manager [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Took 9.17 seconds to spawn the instance on the hypervisor. [ 831.670328] env[68285]: DEBUG nova.compute.manager [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 831.670606] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03218fee-fa1b-44e3-b273-506c5633267e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.672894] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e92b196-a82b-42a0-8bc0-16b9308dbcb1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.688991] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Created folder: Project (76ac2f0f1f5844b6a8682c10e5a75003) in parent group-v580775. [ 831.689196] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Creating folder: Instances. Parent ref: group-v580846. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.689428] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1b01752-f0ff-4e0f-ad90-ba8101437d3f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.701083] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Created folder: Instances in parent group-v580846. [ 831.701339] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 831.701564] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8784827-a928-439d-abdf-d82b62a61152] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 831.701818] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-578c97c0-5d17-419c-ae12-82c6632c788e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.730135] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.730135] env[68285]: value = "task-2891225" [ 831.730135] env[68285]: _type = "Task" [ 831.730135] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.737232] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891225, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.745492] env[68285]: DEBUG nova.compute.manager [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] [instance: c8784827-a928-439d-abdf-d82b62a61152] Received event network-vif-plugged-24fc61fe-8e63-4459-8435-25d4c23e10f2 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 831.745784] env[68285]: DEBUG oslo_concurrency.lockutils [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] Acquiring lock "c8784827-a928-439d-abdf-d82b62a61152-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.746213] env[68285]: DEBUG oslo_concurrency.lockutils [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] Lock "c8784827-a928-439d-abdf-d82b62a61152-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.746461] env[68285]: DEBUG oslo_concurrency.lockutils [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] Lock "c8784827-a928-439d-abdf-d82b62a61152-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.746782] env[68285]: DEBUG nova.compute.manager [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] [instance: c8784827-a928-439d-abdf-d82b62a61152] No waiting events found dispatching network-vif-plugged-24fc61fe-8e63-4459-8435-25d4c23e10f2 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 831.747065] env[68285]: WARNING nova.compute.manager [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] [instance: c8784827-a928-439d-abdf-d82b62a61152] Received unexpected event network-vif-plugged-24fc61fe-8e63-4459-8435-25d4c23e10f2 for instance with vm_state building and task_state spawning. [ 831.747336] env[68285]: DEBUG nova.compute.manager [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] [instance: c8784827-a928-439d-abdf-d82b62a61152] Received event network-changed-24fc61fe-8e63-4459-8435-25d4c23e10f2 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 831.747574] env[68285]: DEBUG nova.compute.manager [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] [instance: c8784827-a928-439d-abdf-d82b62a61152] Refreshing instance network info cache due to event network-changed-24fc61fe-8e63-4459-8435-25d4c23e10f2. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 831.747846] env[68285]: DEBUG oslo_concurrency.lockutils [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] Acquiring lock "refresh_cache-c8784827-a928-439d-abdf-d82b62a61152" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.748163] env[68285]: DEBUG oslo_concurrency.lockutils [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] Acquired lock "refresh_cache-c8784827-a928-439d-abdf-d82b62a61152" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.748500] env[68285]: DEBUG nova.network.neutron [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] [instance: c8784827-a928-439d-abdf-d82b62a61152] Refreshing network info cache for port 24fc61fe-8e63-4459-8435-25d4c23e10f2 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 831.758583] env[68285]: DEBUG nova.network.neutron [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Successfully updated port: bc6618d2-82b3-4803-802e-d16377e5423f {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 831.823639] env[68285]: DEBUG oslo_concurrency.lockutils [req-24c5153a-ad87-4637-ada0-d73c58097184 req-f5325a02-f6d7-4038-a778-1f0fdfa2b6c5 service nova] Releasing lock "refresh_cache-7dca07f4-78aa-45e4-954a-c9f4d58e7c84" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.860152] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891220, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.549324} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.860433] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 7dca07f4-78aa-45e4-954a-c9f4d58e7c84/7dca07f4-78aa-45e4-954a-c9f4d58e7c84.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 831.860650] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 831.860902] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66aae020-d02a-4186-8045-814c40db8bff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.868577] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 831.868577] env[68285]: value = "task-2891226" [ 831.868577] env[68285]: _type = "Task" [ 831.868577] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.878120] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891226, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.936218] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.936703] env[68285]: DEBUG nova.compute.manager [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 831.939788] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.314s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.941464] env[68285]: INFO nova.compute.claims [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.142763] env[68285]: DEBUG nova.compute.manager [req-937a3d3a-c1a3-47cd-a73f-06b7e906199b req-014c6417-4de8-4359-9c63-98ea3404bfdd service nova] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Received event network-vif-plugged-bc6618d2-82b3-4803-802e-d16377e5423f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 832.143446] env[68285]: DEBUG oslo_concurrency.lockutils [req-937a3d3a-c1a3-47cd-a73f-06b7e906199b req-014c6417-4de8-4359-9c63-98ea3404bfdd service nova] Acquiring lock "65f289bb-6e97-47ad-8531-c06a9cce302f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.143446] env[68285]: DEBUG oslo_concurrency.lockutils [req-937a3d3a-c1a3-47cd-a73f-06b7e906199b req-014c6417-4de8-4359-9c63-98ea3404bfdd service nova] Lock "65f289bb-6e97-47ad-8531-c06a9cce302f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.143446] env[68285]: DEBUG oslo_concurrency.lockutils [req-937a3d3a-c1a3-47cd-a73f-06b7e906199b req-014c6417-4de8-4359-9c63-98ea3404bfdd service nova] Lock "65f289bb-6e97-47ad-8531-c06a9cce302f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.143656] env[68285]: DEBUG nova.compute.manager [req-937a3d3a-c1a3-47cd-a73f-06b7e906199b req-014c6417-4de8-4359-9c63-98ea3404bfdd service nova] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] No waiting events found dispatching network-vif-plugged-bc6618d2-82b3-4803-802e-d16377e5423f {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 832.143656] env[68285]: WARNING nova.compute.manager [req-937a3d3a-c1a3-47cd-a73f-06b7e906199b req-014c6417-4de8-4359-9c63-98ea3404bfdd service nova] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Received unexpected event network-vif-plugged-bc6618d2-82b3-4803-802e-d16377e5423f for instance with vm_state building and task_state spawning. [ 832.197650] env[68285]: INFO nova.compute.manager [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Took 33.15 seconds to build instance. [ 832.239592] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891225, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.261406] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Acquiring lock "refresh_cache-65f289bb-6e97-47ad-8531-c06a9cce302f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.261573] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Acquired lock "refresh_cache-65f289bb-6e97-47ad-8531-c06a9cce302f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.261762] env[68285]: DEBUG nova.network.neutron [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 832.378981] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891226, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075734} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.379530] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 832.380420] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65a9367-0a17-43cf-903d-2de898e2fa2b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.406296] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 7dca07f4-78aa-45e4-954a-c9f4d58e7c84/7dca07f4-78aa-45e4-954a-c9f4d58e7c84.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 832.409849] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3deb137-f0fb-4927-8973-cfe6cbabae7b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.432598] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 832.432598] env[68285]: value = "task-2891227" [ 832.432598] env[68285]: _type = "Task" [ 832.432598] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.441848] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891227, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.447621] env[68285]: DEBUG nova.compute.utils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 832.451195] env[68285]: DEBUG nova.compute.manager [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 832.451375] env[68285]: DEBUG nova.network.neutron [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 832.495446] env[68285]: DEBUG nova.network.neutron [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] [instance: c8784827-a928-439d-abdf-d82b62a61152] Updated VIF entry in instance network info cache for port 24fc61fe-8e63-4459-8435-25d4c23e10f2. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 832.495846] env[68285]: DEBUG nova.network.neutron [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] [instance: c8784827-a928-439d-abdf-d82b62a61152] Updating instance_info_cache with network_info: [{"id": "24fc61fe-8e63-4459-8435-25d4c23e10f2", "address": "fa:16:3e:5b:7b:b3", "network": {"id": "1e4937a9-cd6f-4fdc-9eb2-27e2750ba9c5", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1810722646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76ac2f0f1f5844b6a8682c10e5a75003", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24fc61fe-8e", "ovs_interfaceid": "24fc61fe-8e63-4459-8435-25d4c23e10f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.570679] env[68285]: DEBUG nova.policy [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e71174a58844a39ad622581c1503028', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae4430b997b4480abbf2c5fce71cca04', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 832.702419] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7f4182c7-d0f6-4107-a05a-86ce02411649 tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lock "95f5e902-6385-4602-8458-7d7b2069a9da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.954s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.741554] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891225, 'name': CreateVM_Task, 'duration_secs': 0.690601} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.741734] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8784827-a928-439d-abdf-d82b62a61152] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 832.742445] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.742682] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.742992] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 832.743287] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c97d9531-1990-4459-910b-23fcf3250ad6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.749493] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Waiting for the task: (returnval){ [ 832.749493] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523be218-5bdd-3145-6250-3c98a50a47ce" [ 832.749493] env[68285]: _type = "Task" [ 832.749493] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.758609] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523be218-5bdd-3145-6250-3c98a50a47ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.805156] env[68285]: DEBUG nova.network.neutron [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.945116] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891227, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.951981] env[68285]: DEBUG nova.compute.manager [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 832.955300] env[68285]: DEBUG nova.network.neutron [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Updating instance_info_cache with network_info: [{"id": "bc6618d2-82b3-4803-802e-d16377e5423f", "address": "fa:16:3e:49:84:01", "network": {"id": "62e4be83-9686-497a-92e0-3b55db97710d", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1235660515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "055ac9b62b874648b8bde2fc7d4f5386", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc6618d2-82", "ovs_interfaceid": "bc6618d2-82b3-4803-802e-d16377e5423f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.998335] env[68285]: DEBUG oslo_concurrency.lockutils [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] Releasing lock "refresh_cache-c8784827-a928-439d-abdf-d82b62a61152" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.998627] env[68285]: DEBUG nova.compute.manager [req-15cac157-7fd4-41a8-96bf-812d7c48be64 req-b9b34afc-ba3a-47ec-ad2d-673c663f6c0e service nova] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Received event network-vif-deleted-ca2a4f0d-528f-4c7a-a062-e628578f7f7e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 833.175224] env[68285]: DEBUG nova.network.neutron [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Successfully created port: bb64687c-e2d8-4813-9c8a-01b6904040e3 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 833.205267] env[68285]: DEBUG nova.compute.manager [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 833.268266] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523be218-5bdd-3145-6250-3c98a50a47ce, 'name': SearchDatastore_Task, 'duration_secs': 0.013442} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.270966] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.271223] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.271453] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.271613] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.271790] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.272262] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3dfece01-9622-49c8-a4d5-31f7c4f5ea59 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.282232] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.282424] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.283153] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab8edb00-eee8-41fa-a28a-6c7b159410a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.290571] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Waiting for the task: (returnval){ [ 833.290571] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ad8839-4b4c-ff1e-331e-661898a3d6a6" [ 833.290571] env[68285]: _type = "Task" [ 833.290571] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.298411] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ad8839-4b4c-ff1e-331e-661898a3d6a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.447754] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891227, 'name': ReconfigVM_Task, 'duration_secs': 0.816893} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.448028] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 7dca07f4-78aa-45e4-954a-c9f4d58e7c84/7dca07f4-78aa-45e4-954a-c9f4d58e7c84.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 833.448860] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d845390a-a921-4208-9563-787536b2551b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.463123] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Releasing lock "refresh_cache-65f289bb-6e97-47ad-8531-c06a9cce302f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.463401] env[68285]: DEBUG nova.compute.manager [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Instance network_info: |[{"id": "bc6618d2-82b3-4803-802e-d16377e5423f", "address": "fa:16:3e:49:84:01", "network": {"id": "62e4be83-9686-497a-92e0-3b55db97710d", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1235660515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "055ac9b62b874648b8bde2fc7d4f5386", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc6618d2-82", "ovs_interfaceid": "bc6618d2-82b3-4803-802e-d16377e5423f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 833.463732] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 833.463732] env[68285]: value = "task-2891228" [ 833.463732] env[68285]: _type = "Task" [ 833.463732] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.464334] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:84:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b67e519-46cf-44ce-b670-4ba4c0c5b658', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc6618d2-82b3-4803-802e-d16377e5423f', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 833.472184] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Creating folder: Project (055ac9b62b874648b8bde2fc7d4f5386). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 833.472876] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac89ade9-ba00-4a5c-90ca-f71aa9b4a2c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.488553] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891228, 'name': Rename_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.494379] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Created folder: Project (055ac9b62b874648b8bde2fc7d4f5386) in parent group-v580775. 
[ 833.494582] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Creating folder: Instances. Parent ref: group-v580849. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 833.497452] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d273128-7726-4041-b374-8894c3c62087 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.508498] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Created folder: Instances in parent group-v580849. [ 833.508739] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 833.508925] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 833.509144] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01da5756-4f63-4775-afb8-27408e49efa4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.532145] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 833.532145] env[68285]: value = "task-2891231" [ 833.532145] env[68285]: _type = "Task" [ 833.532145] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.541783] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891231, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.592926] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f07cb4c-6a56-4131-82b6-128ab2b758b4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.602031] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd7fe75-9f96-4f54-bc50-de9c556cd8f3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.636214] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997b1f9c-3fbc-47b2-961e-b41b1bc7004a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.646501] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a8d829-8e5f-4d1b-8fca-2c2405abfad0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.663011] env[68285]: DEBUG nova.compute.provider_tree [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.714384] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquiring lock "95f5e902-6385-4602-8458-7d7b2069a9da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.714748] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lock "95f5e902-6385-4602-8458-7d7b2069a9da" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.714821] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquiring lock "95f5e902-6385-4602-8458-7d7b2069a9da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.715055] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lock "95f5e902-6385-4602-8458-7d7b2069a9da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.715256] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b 
tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lock "95f5e902-6385-4602-8458-7d7b2069a9da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.727865] env[68285]: INFO nova.compute.manager [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Terminating instance [ 833.735323] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.805357] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ad8839-4b4c-ff1e-331e-661898a3d6a6, 'name': SearchDatastore_Task, 'duration_secs': 0.009872} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.806218] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d579fac2-a6a2-4471-b1ff-89db9ef26622 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.812917] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Waiting for the task: (returnval){ [ 833.812917] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52839052-25be-4197-c36f-159725b87c2e" [ 833.812917] env[68285]: _type = "Task" [ 833.812917] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.823473] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52839052-25be-4197-c36f-159725b87c2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.966104] env[68285]: DEBUG nova.compute.manager [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 833.988264] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891228, 'name': Rename_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.990667] env[68285]: DEBUG nova.network.neutron [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Successfully created port: 57f2793d-ad69-4e92-9f57-d7c6255ff40d {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 834.003704] env[68285]: DEBUG nova.virt.hardware [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 834.003952] env[68285]: DEBUG nova.virt.hardware [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.004129] env[68285]: DEBUG nova.virt.hardware [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 834.004318] env[68285]: DEBUG nova.virt.hardware [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.004456] env[68285]: DEBUG nova.virt.hardware [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 834.004606] env[68285]: DEBUG nova.virt.hardware [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 834.004801] env[68285]: DEBUG nova.virt.hardware [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 834.004976] 
env[68285]: DEBUG nova.virt.hardware [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 834.005404] env[68285]: DEBUG nova.virt.hardware [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 834.005588] env[68285]: DEBUG nova.virt.hardware [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 834.005769] env[68285]: DEBUG nova.virt.hardware [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 834.007221] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9625346c-37d8-41cf-bb72-2caf451f8803 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.016544] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34452739-9f26-4faf-bc30-bd6c7d7b2b12 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.042858] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891231, 'name': CreateVM_Task, 'duration_secs': 0.419707} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.043067] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 834.043749] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.043907] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.044258] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 834.044518] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ad29bb3-16ed-467c-8465-694c60337357 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.050687] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Waiting for the task: (returnval){ [ 834.050687] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]522b92f8-1dcb-79db-7ffe-34f79add7fa1" [ 834.050687] env[68285]: _type = "Task" [ 834.050687] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.060316] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522b92f8-1dcb-79db-7ffe-34f79add7fa1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.166593] env[68285]: DEBUG nova.scheduler.client.report [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 834.232058] env[68285]: DEBUG nova.compute.manager [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 834.232058] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 834.232627] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e1618e-fed0-4b99-ad17-5b5308f414a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.242047] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 834.242301] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-528eb965-a673-4505-a23a-a26b33049623 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.250688] env[68285]: DEBUG oslo_vmware.api [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 834.250688] env[68285]: value = "task-2891232" [ 834.250688] env[68285]: _type = "Task" [ 834.250688] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.259426] env[68285]: DEBUG oslo_vmware.api [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891232, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.323567] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52839052-25be-4197-c36f-159725b87c2e, 'name': SearchDatastore_Task, 'duration_secs': 0.012987} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.323669] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.323921] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] c8784827-a928-439d-abdf-d82b62a61152/c8784827-a928-439d-abdf-d82b62a61152.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.324194] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8d48b15-0d38-45ea-a16a-f3299aaf0343 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.334054] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Waiting for the task: (returnval){ [ 834.334054] env[68285]: value = "task-2891233" [ 834.334054] env[68285]: _type = "Task" [ 834.334054] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.346685] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891233, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.484872] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891228, 'name': Rename_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.486146] env[68285]: DEBUG nova.network.neutron [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Successfully created port: 1b81cd45-5a3e-4884-af46-ea57107a812b {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 834.564453] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522b92f8-1dcb-79db-7ffe-34f79add7fa1, 'name': SearchDatastore_Task, 'duration_secs': 0.010786} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.565079] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.565333] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 834.565634] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.566086] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.566306] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 834.566675] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17a2b10c-7ef4-4706-93e5-a318e259f604 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.581155] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 
tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 834.581288] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 834.582029] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b35b8da4-837e-4272-aafb-6fec4b6b0bda {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.588844] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Waiting for the task: (returnval){ [ 834.588844] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5234bcaf-0071-d1f2-2ddb-51ddbfd2e2b1" [ 834.588844] env[68285]: _type = "Task" [ 834.588844] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.599141] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5234bcaf-0071-d1f2-2ddb-51ddbfd2e2b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.672300] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.732s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.672842] env[68285]: DEBUG nova.compute.manager [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 834.675728] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 17.875s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.768469] env[68285]: DEBUG oslo_vmware.api [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891232, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.844450] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891233, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.905304] env[68285]: DEBUG nova.compute.manager [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Received event network-changed-3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 834.905304] env[68285]: DEBUG nova.compute.manager [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Refreshing instance network info cache due to event network-changed-3b795cd1-99e2-4a06-9607-e71ca33d19ff. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 834.905304] env[68285]: DEBUG oslo_concurrency.lockutils [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] Acquiring lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.905304] env[68285]: DEBUG oslo_concurrency.lockutils [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] Acquired lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.906086] env[68285]: DEBUG nova.network.neutron [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Refreshing network info cache for port 3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.991605] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891228, 'name': Rename_Task, 'duration_secs': 1.301135} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.992066] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 834.992412] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c50f48a2-b614-49ad-9668-a853811f208a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.000629] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 835.000629] env[68285]: value = "task-2891234" [ 835.000629] env[68285]: _type = "Task" [ 835.000629] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.013119] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891234, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.077719] env[68285]: DEBUG nova.compute.manager [req-8b02270a-3fa0-4c12-8834-aef2152e78b4 req-e06fa6d4-88aa-4354-990e-b142f882ae26 service nova] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Received event network-changed-bc6618d2-82b3-4803-802e-d16377e5423f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 835.078296] env[68285]: DEBUG nova.compute.manager [req-8b02270a-3fa0-4c12-8834-aef2152e78b4 req-e06fa6d4-88aa-4354-990e-b142f882ae26 service nova] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Refreshing instance network info cache due to event network-changed-bc6618d2-82b3-4803-802e-d16377e5423f. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 835.078756] env[68285]: DEBUG oslo_concurrency.lockutils [req-8b02270a-3fa0-4c12-8834-aef2152e78b4 req-e06fa6d4-88aa-4354-990e-b142f882ae26 service nova] Acquiring lock "refresh_cache-65f289bb-6e97-47ad-8531-c06a9cce302f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.078997] env[68285]: DEBUG oslo_concurrency.lockutils [req-8b02270a-3fa0-4c12-8834-aef2152e78b4 req-e06fa6d4-88aa-4354-990e-b142f882ae26 service nova] Acquired lock "refresh_cache-65f289bb-6e97-47ad-8531-c06a9cce302f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 835.079289] env[68285]: DEBUG nova.network.neutron [req-8b02270a-3fa0-4c12-8834-aef2152e78b4 req-e06fa6d4-88aa-4354-990e-b142f882ae26 service nova] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Refreshing network info cache for port bc6618d2-82b3-4803-802e-d16377e5423f {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 835.103760] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5234bcaf-0071-d1f2-2ddb-51ddbfd2e2b1, 'name': SearchDatastore_Task, 'duration_secs': 0.020634} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.105016] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02e5e0d7-3087-465a-8640-f3cf286eb879 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.112959] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Waiting for the task: (returnval){ [ 835.112959] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524ab95f-2e7e-2f9c-5ae8-02cfd3421899" [ 835.112959] env[68285]: _type = "Task" [ 835.112959] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.122459] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524ab95f-2e7e-2f9c-5ae8-02cfd3421899, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.179238] env[68285]: DEBUG nova.compute.utils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 835.183331] env[68285]: DEBUG nova.compute.manager [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 835.183451] env[68285]: DEBUG nova.network.neutron [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 835.233580] env[68285]: DEBUG nova.policy [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fee422406a774be7830837baa9743f0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7106da1f6bcb4d0cb3dcad984b3adb33', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 835.265730] env[68285]: DEBUG oslo_vmware.api [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891232, 'name': PowerOffVM_Task, 'duration_secs': 0.771699} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.267795] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 835.267994] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 835.268869] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32a75818-0f79-4bc7-9006-e18272706951 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.350921] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891233, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514534} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.352678] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] c8784827-a928-439d-abdf-d82b62a61152/c8784827-a928-439d-abdf-d82b62a61152.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.353166] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.353609] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 835.354113] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 835.354556] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Deleting the datastore file [datastore2] 95f5e902-6385-4602-8458-7d7b2069a9da {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.355268] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30e6d475-b731-417d-ad9a-65fd8b48c2d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.358679] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-deb1aeb6-56ec-45dd-9cee-e75a3b0af364 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.372965] env[68285]: DEBUG oslo_vmware.api [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for the task: (returnval){ [ 835.372965] env[68285]: value = "task-2891236" [ 835.372965] env[68285]: _type = "Task" [ 835.372965] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.373259] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Waiting for the task: (returnval){ [ 835.373259] env[68285]: value = "task-2891237" [ 835.373259] env[68285]: _type = "Task" [ 835.373259] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.389244] env[68285]: DEBUG oslo_vmware.api [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891236, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.392445] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891237, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.515215] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891234, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.609845] env[68285]: DEBUG nova.network.neutron [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Successfully created port: b67172eb-4f98-4870-a433-22f6e238cbf4 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 835.624447] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524ab95f-2e7e-2f9c-5ae8-02cfd3421899, 'name': SearchDatastore_Task, 'duration_secs': 0.011494} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.627152] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.627428] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 65f289bb-6e97-47ad-8531-c06a9cce302f/65f289bb-6e97-47ad-8531-c06a9cce302f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 835.627929] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c69378e2-2b12-41eb-8901-a37361d9406f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.643521] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Waiting for the task: (returnval){ [ 835.643521] env[68285]: value = "task-2891238" [ 835.643521] env[68285]: _type = "Task" [ 835.643521] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.654548] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891238, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.684440] env[68285]: DEBUG nova.compute.manager [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 835.848136] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1e7810-abe7-47c8-870d-042ad0c38d28 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.860018] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a212a2-6a5f-40fd-9aec-404f9f737d27 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.904054] env[68285]: DEBUG nova.network.neutron [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Updated VIF entry in instance network info cache for port 3b795cd1-99e2-4a06-9607-e71ca33d19ff. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 835.904054] env[68285]: DEBUG nova.network.neutron [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Updating instance_info_cache with network_info: [{"id": "3b795cd1-99e2-4a06-9607-e71ca33d19ff", "address": "fa:16:3e:75:c1:88", "network": {"id": "23f2c3a4-b609-4f2b-82ea-30d2f16df8e6", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2087738678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53261bb9432948b58692227101a4717b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b795cd1-99", "ovs_interfaceid": "3b795cd1-99e2-4a06-9607-e71ca33d19ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.912633] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2e82e5-ad66-41fe-a64e-9c1d1678d291 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.935066] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4805140b-3d78-4ebb-9516-f79d685623fe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.940225] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891237, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076924} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.940544] env[68285]: DEBUG oslo_vmware.api [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Task: {'id': task-2891236, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169529} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.940842] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.941244] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 835.941416] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 835.941623] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 835.941831] env[68285]: INFO nova.compute.manager [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Took 1.71 seconds to destroy the instance on the hypervisor. [ 835.942091] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 835.943477] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff511ab-e7d1-4dc0-bc29-30edc023b35d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.949783] env[68285]: DEBUG nova.compute.manager [-] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 835.949783] env[68285]: DEBUG nova.network.neutron [-] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 835.958654] env[68285]: DEBUG nova.compute.provider_tree [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.967229] env[68285]: DEBUG nova.network.neutron [req-8b02270a-3fa0-4c12-8834-aef2152e78b4 req-e06fa6d4-88aa-4354-990e-b142f882ae26 service nova] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Updated VIF entry in instance network info cache for port bc6618d2-82b3-4803-802e-d16377e5423f. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 835.967229] env[68285]: DEBUG nova.network.neutron [req-8b02270a-3fa0-4c12-8834-aef2152e78b4 req-e06fa6d4-88aa-4354-990e-b142f882ae26 service nova] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Updating instance_info_cache with network_info: [{"id": "bc6618d2-82b3-4803-802e-d16377e5423f", "address": "fa:16:3e:49:84:01", "network": {"id": "62e4be83-9686-497a-92e0-3b55db97710d", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1235660515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "055ac9b62b874648b8bde2fc7d4f5386", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc6618d2-82", "ovs_interfaceid": "bc6618d2-82b3-4803-802e-d16377e5423f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.987779] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] c8784827-a928-439d-abdf-d82b62a61152/c8784827-a928-439d-abdf-d82b62a61152.vmdk or device 
None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.989332] env[68285]: DEBUG oslo_concurrency.lockutils [req-8b02270a-3fa0-4c12-8834-aef2152e78b4 req-e06fa6d4-88aa-4354-990e-b142f882ae26 service nova] Releasing lock "refresh_cache-65f289bb-6e97-47ad-8531-c06a9cce302f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.989891] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a673055a-aee9-40db-b91b-d7be46085034 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.017041] env[68285]: DEBUG oslo_vmware.api [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891234, 'name': PowerOnVM_Task, 'duration_secs': 0.56249} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.022330] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 836.022330] env[68285]: INFO nova.compute.manager [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Took 10.69 seconds to spawn the instance on the hypervisor. [ 836.022330] env[68285]: DEBUG nova.compute.manager [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 836.022330] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Waiting for the task: (returnval){ [ 836.022330] env[68285]: value = "task-2891239" [ 836.022330] env[68285]: _type = "Task" [ 836.022330] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.022330] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bab6d33-5c46-40b0-86c1-493d492ea374 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.035445] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891239, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.155536] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891238, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495693} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.155799] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 65f289bb-6e97-47ad-8531-c06a9cce302f/65f289bb-6e97-47ad-8531-c06a9cce302f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 836.156015] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.156272] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49fd7f48-62d7-4590-8b1f-b0b05fe6b5ad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.164216] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Waiting for the task: (returnval){ [ 836.164216] env[68285]: value = "task-2891240" [ 836.164216] env[68285]: _type = "Task" [ 836.164216] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.178023] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891240, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.416651] env[68285]: DEBUG oslo_concurrency.lockutils [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] Releasing lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.417063] env[68285]: DEBUG nova.compute.manager [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Received event network-changed-3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 836.417494] env[68285]: DEBUG nova.compute.manager [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Refreshing instance network info cache due to event network-changed-3b795cd1-99e2-4a06-9607-e71ca33d19ff. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 836.417899] env[68285]: DEBUG oslo_concurrency.lockutils [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] Acquiring lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.418223] env[68285]: DEBUG oslo_concurrency.lockutils [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] Acquired lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.419056] env[68285]: DEBUG nova.network.neutron [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Refreshing network info cache for port 3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 836.462404] env[68285]: DEBUG nova.scheduler.client.report [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 836.534577] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891239, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.550949] env[68285]: INFO nova.compute.manager [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Took 37.07 seconds to build instance. [ 836.677578] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891240, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073914} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.679184] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 836.680176] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c9c345-5389-4bc2-88f3-0466a0798fff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.704898] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 65f289bb-6e97-47ad-8531-c06a9cce302f/65f289bb-6e97-47ad-8531-c06a9cce302f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 836.706500] env[68285]: DEBUG nova.compute.manager [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 836.708836] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc3911eb-d862-4fa3-978f-baf377cc677a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.732633] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Waiting for the task: (returnval){ [ 836.732633] env[68285]: value = "task-2891241" [ 836.732633] env[68285]: _type = "Task" [ 836.732633] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.743983] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891241, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.752880] env[68285]: DEBUG nova.virt.hardware [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 836.753190] env[68285]: DEBUG nova.virt.hardware [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 836.753390] env[68285]: DEBUG nova.virt.hardware [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 836.753613] env[68285]: DEBUG nova.virt.hardware [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 836.753786] env[68285]: DEBUG nova.virt.hardware [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 836.753960] env[68285]: DEBUG nova.virt.hardware [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 836.754252] env[68285]: DEBUG nova.virt.hardware [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 836.754453] env[68285]: DEBUG nova.virt.hardware [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 836.754662] env[68285]: DEBUG 
nova.virt.hardware [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 836.754860] env[68285]: DEBUG nova.virt.hardware [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 836.755137] env[68285]: DEBUG nova.virt.hardware [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 836.756407] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ab735a-2053-4387-971a-e18def0a8b4c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.765567] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec01a19-46d6-41c2-ad18-83b3e61055c1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.825760] env[68285]: DEBUG nova.network.neutron [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Successfully updated port: bb64687c-e2d8-4813-9c8a-01b6904040e3 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 837.035629] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891239, 'name': ReconfigVM_Task, 'duration_secs': 0.802053} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.036021] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Reconfigured VM instance instance-0000001b to attach disk [datastore2] c8784827-a928-439d-abdf-d82b62a61152/c8784827-a928-439d-abdf-d82b62a61152.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 837.036760] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a948229-16c6-46a5-9f70-eedfdf22d9a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.045713] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Waiting for the task: (returnval){ [ 837.045713] env[68285]: value = "task-2891242" [ 837.045713] env[68285]: _type = "Task" [ 837.045713] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.051729] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08af7d60-2b5a-4f52-a15d-bc299ade9b5b tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "7dca07f4-78aa-45e4-954a-c9f4d58e7c84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.267s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.061412] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891242, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.180913] env[68285]: DEBUG nova.network.neutron [-] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.245478] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891241, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.306025] env[68285]: DEBUG nova.network.neutron [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Successfully updated port: b67172eb-4f98-4870-a433-22f6e238cbf4 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 837.377397] env[68285]: DEBUG nova.network.neutron [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Updated VIF entry in instance network info cache for port 3b795cd1-99e2-4a06-9607-e71ca33d19ff. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 837.377632] env[68285]: DEBUG nova.network.neutron [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Updating instance_info_cache with network_info: [{"id": "3b795cd1-99e2-4a06-9607-e71ca33d19ff", "address": "fa:16:3e:75:c1:88", "network": {"id": "23f2c3a4-b609-4f2b-82ea-30d2f16df8e6", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2087738678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53261bb9432948b58692227101a4717b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b795cd1-99", "ovs_interfaceid": "3b795cd1-99e2-4a06-9607-e71ca33d19ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.474690] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.799s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.477641] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.522s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.477867] env[68285]: DEBUG nova.objects.instance [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Lazy-loading 'resources' on Instance uuid 9f4b2b94-ec19-4a8e-8663-ab71c417d093 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 837.557763] env[68285]: DEBUG nova.compute.manager [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 837.560557] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891242, 'name': Rename_Task, 'duration_secs': 0.27726} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.560801] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 837.562158] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cdd0efa-3490-4856-8b23-ba47a5b5a436 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.572584] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Waiting for the task: (returnval){ [ 837.572584] env[68285]: value = "task-2891243" [ 837.572584] env[68285]: _type = "Task" [ 837.572584] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.583458] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891243, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.683488] env[68285]: INFO nova.compute.manager [-] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Took 1.74 seconds to deallocate network for instance. [ 837.745869] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891241, 'name': ReconfigVM_Task, 'duration_secs': 0.636584} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.746388] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 65f289bb-6e97-47ad-8531-c06a9cce302f/65f289bb-6e97-47ad-8531-c06a9cce302f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 837.746826] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c01249e9-d67a-4ede-8ff3-bad288374118 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.755054] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Waiting for the task: (returnval){ [ 837.755054] env[68285]: value = "task-2891244" [ 837.755054] env[68285]: _type = "Task" [ 837.755054] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.763880] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891244, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.806848] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.807082] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.807250] env[68285]: DEBUG nova.network.neutron [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 837.884680] env[68285]: DEBUG oslo_concurrency.lockutils [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] Releasing lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.885495] env[68285]: DEBUG nova.compute.manager [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Received event network-changed-320c995b-dad7-40a2-90c1-1e0f3065e6cb {{(pid=68285) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 837.886353] env[68285]: DEBUG nova.compute.manager [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Refreshing instance network info cache due to event network-changed-320c995b-dad7-40a2-90c1-1e0f3065e6cb. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 837.886353] env[68285]: DEBUG oslo_concurrency.lockutils [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] Acquiring lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.886353] env[68285]: DEBUG oslo_concurrency.lockutils [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] Acquired lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.886733] env[68285]: DEBUG nova.network.neutron [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Refreshing network info cache for port 320c995b-dad7-40a2-90c1-1e0f3065e6cb {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 837.896657] env[68285]: DEBUG nova.compute.manager [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Received event network-vif-plugged-bb64687c-e2d8-4813-9c8a-01b6904040e3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 837.896891] env[68285]: DEBUG oslo_concurrency.lockutils [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] Acquiring lock "1c42043d-f8db-4cb9-8147-48d0d32c982b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.897246] env[68285]: DEBUG oslo_concurrency.lockutils [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] Lock "1c42043d-f8db-4cb9-8147-48d0d32c982b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.897381] env[68285]: DEBUG oslo_concurrency.lockutils [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] Lock "1c42043d-f8db-4cb9-8147-48d0d32c982b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.897547] env[68285]: DEBUG nova.compute.manager [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] No waiting events found dispatching network-vif-plugged-bb64687c-e2d8-4813-9c8a-01b6904040e3 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 837.897708] env[68285]: WARNING nova.compute.manager [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] 
[instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Received unexpected event network-vif-plugged-bb64687c-e2d8-4813-9c8a-01b6904040e3 for instance with vm_state building and task_state spawning. [ 837.897866] env[68285]: DEBUG nova.compute.manager [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Received event network-vif-deleted-62912856-783f-41a2-8ea4-8d02547b1b9a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 837.898039] env[68285]: DEBUG nova.compute.manager [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Received event network-changed-bb64687c-e2d8-4813-9c8a-01b6904040e3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 837.898197] env[68285]: DEBUG nova.compute.manager [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Refreshing instance network info cache due to event network-changed-bb64687c-e2d8-4813-9c8a-01b6904040e3. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 837.898374] env[68285]: DEBUG oslo_concurrency.lockutils [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] Acquiring lock "refresh_cache-1c42043d-f8db-4cb9-8147-48d0d32c982b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.898558] env[68285]: DEBUG oslo_concurrency.lockutils [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] Acquired lock "refresh_cache-1c42043d-f8db-4cb9-8147-48d0d32c982b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.898807] env[68285]: DEBUG nova.network.neutron [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Refreshing network info cache for port bb64687c-e2d8-4813-9c8a-01b6904040e3 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.026776] env[68285]: DEBUG nova.compute.manager [req-c7bad572-e122-4972-9007-bd2beac62b5a req-247a49d0-4fba-4b7f-8bb6-e9531b417576 service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Received event network-vif-plugged-b67172eb-4f98-4870-a433-22f6e238cbf4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 838.027095] env[68285]: DEBUG oslo_concurrency.lockutils [req-c7bad572-e122-4972-9007-bd2beac62b5a req-247a49d0-4fba-4b7f-8bb6-e9531b417576 service nova] Acquiring lock "34aeba05-804e-444c-8e58-69c7721b10b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.027313] env[68285]: DEBUG oslo_concurrency.lockutils [req-c7bad572-e122-4972-9007-bd2beac62b5a req-247a49d0-4fba-4b7f-8bb6-e9531b417576 service nova] Lock "34aeba05-804e-444c-8e58-69c7721b10b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.027530] env[68285]: DEBUG oslo_concurrency.lockutils 
[req-c7bad572-e122-4972-9007-bd2beac62b5a req-247a49d0-4fba-4b7f-8bb6-e9531b417576 service nova] Lock "34aeba05-804e-444c-8e58-69c7721b10b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.027711] env[68285]: DEBUG nova.compute.manager [req-c7bad572-e122-4972-9007-bd2beac62b5a req-247a49d0-4fba-4b7f-8bb6-e9531b417576 service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] No waiting events found dispatching network-vif-plugged-b67172eb-4f98-4870-a433-22f6e238cbf4 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 838.027873] env[68285]: WARNING nova.compute.manager [req-c7bad572-e122-4972-9007-bd2beac62b5a req-247a49d0-4fba-4b7f-8bb6-e9531b417576 service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Received unexpected event network-vif-plugged-b67172eb-4f98-4870-a433-22f6e238cbf4 for instance with vm_state building and task_state spawning. [ 838.035883] env[68285]: INFO nova.scheduler.client.report [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Deleted allocation for migration ee2c2826-d8be-4236-8069-9c4a38957ca5 [ 838.091306] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891243, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.092999] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.190086] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.268083] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891244, 'name': Rename_Task, 'duration_secs': 0.187869} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.268373] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 838.268621] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cee5343d-6f99-4600-8f4b-d0d09e1b2329 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.277682] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Waiting for the task: (returnval){ [ 838.277682] env[68285]: value = "task-2891245" [ 838.277682] env[68285]: _type = "Task" [ 838.277682] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.287169] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891245, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.320911] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "7dca07f4-78aa-45e4-954a-c9f4d58e7c84" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.320911] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "7dca07f4-78aa-45e4-954a-c9f4d58e7c84" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.320911] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "7dca07f4-78aa-45e4-954a-c9f4d58e7c84-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.320911] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "7dca07f4-78aa-45e4-954a-c9f4d58e7c84-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.321120] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99d0caa4-856a-452b-bba9-38ba50e33868 
tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "7dca07f4-78aa-45e4-954a-c9f4d58e7c84-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.322759] env[68285]: INFO nova.compute.manager [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Terminating instance [ 838.377510] env[68285]: DEBUG nova.network.neutron [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.465680] env[68285]: DEBUG nova.network.neutron [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.544214] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f28fc5f6-a7dd-413a-b3b9-0ab4357d9ed4 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "a97df3d2-c182-46d8-95c2-61caccade285" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 25.037s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.558933] env[68285]: DEBUG nova.network.neutron [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.594502] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.594747] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.595100] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.595886] 
env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.596212] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.598355] env[68285]: DEBUG oslo_vmware.api [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891243, 'name': PowerOnVM_Task, 'duration_secs': 0.591283} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.603320] env[68285]: INFO nova.compute.manager [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Terminating instance [ 838.605899] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 838.606226] env[68285]: INFO nova.compute.manager [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Took 10.08 seconds to spawn the instance on the hypervisor. 
[ 838.606553] env[68285]: DEBUG nova.compute.manager [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 838.607768] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3afed906-1988-43f3-af2c-8b1a59d9783b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.619704] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a02b46-c6fe-414d-9fa2-8a4239a26097 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.623671] env[68285]: DEBUG nova.compute.manager [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 838.623984] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 838.626219] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a932a5d8-0ee5-4ee8-b052-221cf406caf8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.635645] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e0aff0-c2a1-4f87-84ba-aa92b02a0675 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.646482] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 838.653098] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9309367-45da-46ce-8572-672ad89bf6c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.704674] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6489969a-e474-4d4c-bb5c-4512d0362a2f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.711130] env[68285]: DEBUG oslo_vmware.api [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 838.711130] env[68285]: value = "task-2891246" [ 838.711130] env[68285]: _type = "Task" [ 838.711130] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.724256] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e420475-de2f-483b-ac38-208c7d109f82 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.732183] env[68285]: DEBUG oslo_vmware.api [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891246, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.743430] env[68285]: DEBUG nova.compute.provider_tree [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.792149] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891245, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.813499] env[68285]: DEBUG nova.network.neutron [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Successfully updated port: 57f2793d-ad69-4e92-9f57-d7c6255ff40d {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 838.832046] env[68285]: DEBUG nova.compute.manager [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 838.832046] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 838.832578] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b51bfd2-8448-410f-bcff-f26178b3282b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.841858] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 838.842141] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5941cc4-c394-41d5-8af6-8144b161debf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.849931] env[68285]: DEBUG oslo_vmware.api [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 838.849931] env[68285]: value = "task-2891247" [ 838.849931] env[68285]: _type = "Task" [ 838.849931] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.861115] env[68285]: DEBUG oslo_vmware.api [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891247, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.907933] env[68285]: DEBUG nova.network.neutron [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance_info_cache with network_info: [{"id": "b67172eb-4f98-4870-a433-22f6e238cbf4", "address": "fa:16:3e:69:33:45", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67172eb-4f", "ovs_interfaceid": "b67172eb-4f98-4870-a433-22f6e238cbf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.052171] env[68285]: DEBUG nova.network.neutron [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Updated VIF entry in instance network info cache for port 320c995b-dad7-40a2-90c1-1e0f3065e6cb. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.052657] env[68285]: DEBUG nova.network.neutron [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Updating instance_info_cache with network_info: [{"id": "320c995b-dad7-40a2-90c1-1e0f3065e6cb", "address": "fa:16:3e:90:33:19", "network": {"id": "23f2c3a4-b609-4f2b-82ea-30d2f16df8e6", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2087738678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53261bb9432948b58692227101a4717b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap320c995b-da", "ovs_interfaceid": "320c995b-dad7-40a2-90c1-1e0f3065e6cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.069632] env[68285]: DEBUG oslo_concurrency.lockutils [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] Releasing lock "refresh_cache-1c42043d-f8db-4cb9-8147-48d0d32c982b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.069632] env[68285]: DEBUG nova.compute.manager [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Received event network-changed-320c995b-dad7-40a2-90c1-1e0f3065e6cb {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 839.069852] env[68285]: DEBUG nova.compute.manager [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Refreshing instance network info cache due to event network-changed-320c995b-dad7-40a2-90c1-1e0f3065e6cb. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 839.069942] env[68285]: DEBUG oslo_concurrency.lockutils [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] Acquiring lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.173264] env[68285]: INFO nova.compute.manager [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Took 36.09 seconds to build instance. 
[ 839.223959] env[68285]: DEBUG oslo_vmware.api [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891246, 'name': PowerOffVM_Task, 'duration_secs': 0.22179} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.224513] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 839.224689] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 839.224940] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3582132-1e2c-4b02-abf4-a320381ad071 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.246741] env[68285]: DEBUG nova.scheduler.client.report [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 839.290204] env[68285]: DEBUG oslo_vmware.api [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891245, 'name': PowerOnVM_Task, 'duration_secs': 0.765868} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.290544] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 839.290758] env[68285]: INFO nova.compute.manager [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Took 7.98 seconds to spawn the instance on the hypervisor. 
[ 839.290964] env[68285]: DEBUG nova.compute.manager [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 839.291775] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99397b38-e0e8-436b-8235-ae91f07d6d30 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.321385] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 839.321786] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 839.322127] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Deleting the datastore file [datastore2] 8bedba57-e7c8-4fa8-b171-f6d74550a31c {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 839.322555] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-316d9355-57eb-45fc-b47c-422a431b4243 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.332151] env[68285]: DEBUG oslo_vmware.api [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 839.332151] env[68285]: value = "task-2891249" [ 839.332151] env[68285]: _type = "Task" [ 839.332151] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.343198] env[68285]: DEBUG oslo_vmware.api [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891249, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.359197] env[68285]: DEBUG oslo_vmware.api [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891247, 'name': PowerOffVM_Task, 'duration_secs': 0.25044} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.362619] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 839.362619] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 839.362619] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff6ab883-ee84-4464-b4d2-37de0766785c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.411600] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.414015] env[68285]: DEBUG nova.compute.manager [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Instance network_info: |[{"id": "b67172eb-4f98-4870-a433-22f6e238cbf4", "address": "fa:16:3e:69:33:45", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67172eb-4f", "ovs_interfaceid": "b67172eb-4f98-4870-a433-22f6e238cbf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 839.414167] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:33:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b67172eb-4f98-4870-a433-22f6e238cbf4', 
'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 839.423119] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Creating folder: Project (7106da1f6bcb4d0cb3dcad984b3adb33). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 839.423533] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-730b57ca-a688-4f3a-b053-c077608f9291 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.439179] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Created folder: Project (7106da1f6bcb4d0cb3dcad984b3adb33) in parent group-v580775. [ 839.439179] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Creating folder: Instances. Parent ref: group-v580852. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 839.439534] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 839.439863] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 839.440156] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleting the datastore file [datastore2] 7dca07f4-78aa-45e4-954a-c9f4d58e7c84 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 839.440569] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef0e1548-caa6-404f-98c0-a3497f57d978 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.442639] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7f65147-7359-4c4a-aca0-e048a99e7679 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.452019] env[68285]: DEBUG oslo_vmware.api [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 839.452019] env[68285]: value = "task-2891253" [ 839.452019] env[68285]: _type = "Task" [ 839.452019] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.455470] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Created folder: Instances in parent group-v580852. [ 839.455893] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 839.459083] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 839.459480] env[68285]: DEBUG oslo_vmware.api [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891253, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.459776] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-acb05e1a-5cc9-4cb5-bc5b-e20f69c2f835 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.482091] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 839.482091] env[68285]: value = "task-2891254" [ 839.482091] env[68285]: _type = "Task" [ 839.482091] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.493301] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891254, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.558177] env[68285]: DEBUG oslo_concurrency.lockutils [req-566a34a8-5bed-45b2-89b1-b0a286c8b30d req-13385754-e544-4e0a-9d4b-9d9cb5696380 service nova] Releasing lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.558177] env[68285]: DEBUG oslo_concurrency.lockutils [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] Acquired lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.558177] env[68285]: DEBUG nova.network.neutron [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Refreshing network info cache for port 320c995b-dad7-40a2-90c1-1e0f3065e6cb {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 839.677162] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d2c6f8c-526f-41c8-8385-f589b2666f9e tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Lock "c8784827-a928-439d-abdf-d82b62a61152" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.273s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.755021] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.274s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.755021] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.366s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.756907] env[68285]: INFO nova.compute.claims [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 839.778881] env[68285]: INFO nova.scheduler.client.report [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Deleted allocations for instance 9f4b2b94-ec19-4a8e-8663-ab71c417d093 [ 839.822337] env[68285]: INFO nova.compute.manager [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Took 36.27 seconds to build instance. 
[ 839.844654] env[68285]: DEBUG oslo_vmware.api [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891249, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162773} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.845073] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 839.845532] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 839.845837] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 839.847329] env[68285]: INFO nova.compute.manager [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Took 1.22 seconds to destroy the instance on the hypervisor. [ 839.847329] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 839.848482] env[68285]: DEBUG nova.compute.manager [-] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 839.848568] env[68285]: DEBUG nova.network.neutron [-] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 839.960795] env[68285]: DEBUG oslo_vmware.api [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891253, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235189} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.961300] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 839.961744] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 839.961952] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 839.962145] env[68285]: INFO nova.compute.manager [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Took 1.13 seconds to destroy the instance on the hypervisor. [ 839.962496] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 839.962569] env[68285]: DEBUG nova.compute.manager [-] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 839.962685] env[68285]: DEBUG nova.network.neutron [-] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 839.997767] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891254, 'name': CreateVM_Task, 'duration_secs': 0.401512} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.997947] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 839.998669] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.998828] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.999350] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 839.999477] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddc57975-1020-45b4-8864-e993c4c4d26c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.004987] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 840.004987] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c231c9-c0bf-632a-0a15-805b91e9b124" [ 840.004987] env[68285]: _type = "Task" [ 840.004987] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.015311] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c231c9-c0bf-632a-0a15-805b91e9b124, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.178253] env[68285]: DEBUG nova.compute.manager [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 840.290893] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad929a30-5fb8-4303-9ee2-ed0e0bd30cb1 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012 tempest-FloatingIPsAssociationNegativeTestJSON-1836045012-project-member] Lock "9f4b2b94-ec19-4a8e-8663-ab71c417d093" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.454s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.325204] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e463786f-26ec-4ff4-a6e7-64a8a4119f1e tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Lock "65f289bb-6e97-47ad-8531-c06a9cce302f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.445s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.363057] env[68285]: DEBUG nova.compute.manager [req-63fbfd54-c202-486e-9b18-5df6331be46a req-c387a6c1-af8e-4a49-ae10-8560422ab8c0 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Received event network-changed-3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 840.363057] env[68285]: DEBUG nova.compute.manager [req-63fbfd54-c202-486e-9b18-5df6331be46a req-c387a6c1-af8e-4a49-ae10-8560422ab8c0 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Refreshing instance network info cache due to event network-changed-3b795cd1-99e2-4a06-9607-e71ca33d19ff. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 840.363057] env[68285]: DEBUG oslo_concurrency.lockutils [req-63fbfd54-c202-486e-9b18-5df6331be46a req-c387a6c1-af8e-4a49-ae10-8560422ab8c0 service nova] Acquiring lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.363057] env[68285]: DEBUG oslo_concurrency.lockutils [req-63fbfd54-c202-486e-9b18-5df6331be46a req-c387a6c1-af8e-4a49-ae10-8560422ab8c0 service nova] Acquired lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.363363] env[68285]: DEBUG nova.network.neutron [req-63fbfd54-c202-486e-9b18-5df6331be46a req-c387a6c1-af8e-4a49-ae10-8560422ab8c0 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Refreshing network info cache for port 3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 840.387176] env[68285]: DEBUG nova.compute.manager [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Received event network-changed-b67172eb-4f98-4870-a433-22f6e238cbf4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 840.387176] env[68285]: DEBUG nova.compute.manager [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Refreshing instance network info cache due to event network-changed-b67172eb-4f98-4870-a433-22f6e238cbf4. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 840.387176] env[68285]: DEBUG oslo_concurrency.lockutils [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] Acquiring lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.387176] env[68285]: DEBUG oslo_concurrency.lockutils [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] Acquired lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.387176] env[68285]: DEBUG nova.network.neutron [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Refreshing network info cache for port b67172eb-4f98-4870-a433-22f6e238cbf4 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 840.481613] env[68285]: DEBUG nova.network.neutron [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Updated VIF entry in instance network info cache for port 320c995b-dad7-40a2-90c1-1e0f3065e6cb. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 840.481966] env[68285]: DEBUG nova.network.neutron [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Updating instance_info_cache with network_info: [{"id": "320c995b-dad7-40a2-90c1-1e0f3065e6cb", "address": "fa:16:3e:90:33:19", "network": {"id": "23f2c3a4-b609-4f2b-82ea-30d2f16df8e6", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2087738678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53261bb9432948b58692227101a4717b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap320c995b-da", "ovs_interfaceid": "320c995b-dad7-40a2-90c1-1e0f3065e6cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.515874] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c231c9-c0bf-632a-0a15-805b91e9b124, 'name': SearchDatastore_Task, 'duration_secs': 0.010949} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.516181] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.516408] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 840.516638] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.516781] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.516958] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 840.517253] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3873cd57-09ab-4cf5-a848-db6306b58d3f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.530032] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 840.530032] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 840.530032] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a164a465-502a-4c93-b4e3-b78bd75f9670 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.533437] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 840.533437] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52aabe40-fecb-4cae-d84f-4e94433d7ac9" [ 840.533437] env[68285]: _type = "Task" [ 840.533437] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.542233] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52aabe40-fecb-4cae-d84f-4e94433d7ac9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.704384] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.835620] env[68285]: DEBUG nova.compute.manager [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 840.868553] env[68285]: DEBUG nova.network.neutron [-] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.963614] env[68285]: DEBUG nova.network.neutron [-] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.985510] env[68285]: DEBUG oslo_concurrency.lockutils [req-fb16ec0d-2a9b-4d5a-8a45-4a5ec08987ff req-e25e8d91-2631-4ebd-9dbf-da885ea00b0e service nova] Releasing lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.046374] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52aabe40-fecb-4cae-d84f-4e94433d7ac9, 'name': SearchDatastore_Task, 'duration_secs': 0.010137} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.051485] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbb66f37-772f-4616-b744-e46f4727381a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.058329] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 841.058329] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52808f2b-d9b5-e506-afaf-e3cf465a1ae6" [ 841.058329] env[68285]: _type = "Task" [ 841.058329] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.070973] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52808f2b-d9b5-e506-afaf-e3cf465a1ae6, 'name': SearchDatastore_Task} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.071234] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.071492] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 34aeba05-804e-444c-8e58-69c7721b10b1/34aeba05-804e-444c-8e58-69c7721b10b1.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 841.071768] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8f8b6cc-3682-48d3-ad3d-2e56a129c861 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.080918] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 841.080918] env[68285]: value = "task-2891255" [ 841.080918] env[68285]: _type = "Task" [ 841.080918] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.097488] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891255, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.258553] env[68285]: DEBUG nova.network.neutron [req-63fbfd54-c202-486e-9b18-5df6331be46a req-c387a6c1-af8e-4a49-ae10-8560422ab8c0 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Updated VIF entry in instance network info cache for port 3b795cd1-99e2-4a06-9607-e71ca33d19ff. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 841.259140] env[68285]: DEBUG nova.network.neutron [req-63fbfd54-c202-486e-9b18-5df6331be46a req-c387a6c1-af8e-4a49-ae10-8560422ab8c0 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Updating instance_info_cache with network_info: [{"id": "3b795cd1-99e2-4a06-9607-e71ca33d19ff", "address": "fa:16:3e:75:c1:88", "network": {"id": "23f2c3a4-b609-4f2b-82ea-30d2f16df8e6", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2087738678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53261bb9432948b58692227101a4717b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b795cd1-99", "ovs_interfaceid": "3b795cd1-99e2-4a06-9607-e71ca33d19ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.265741] env[68285]: DEBUG nova.network.neutron [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updated VIF entry in instance network info cache for port b67172eb-4f98-4870-a433-22f6e238cbf4. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 841.266113] env[68285]: DEBUG nova.network.neutron [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance_info_cache with network_info: [{"id": "b67172eb-4f98-4870-a433-22f6e238cbf4", "address": "fa:16:3e:69:33:45", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67172eb-4f", "ovs_interfaceid": "b67172eb-4f98-4870-a433-22f6e238cbf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.369162] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.372834] env[68285]: INFO nova.compute.manager [-] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Took 1.52 seconds to deallocate network for instance. [ 841.400427] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1fba22-5fe9-4e15-bc92-520e371d6f36 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.409666] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0acbd82a-c974-4c2e-b870-7d74c058c5fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.447173] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d494ff1f-21e5-42ad-b19a-9f7672668ddb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.457038] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7ce88a-257a-4fa3-b402-bddd218a96f5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.472698] env[68285]: INFO nova.compute.manager [-] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Took 1.51 seconds to deallocate network for instance. 
[ 841.473312] env[68285]: DEBUG nova.compute.provider_tree [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.594348] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891255, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484796} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.594626] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 34aeba05-804e-444c-8e58-69c7721b10b1/34aeba05-804e-444c-8e58-69c7721b10b1.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 841.594848] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 841.595106] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2207918a-f74e-4ac3-997d-8dd18f2d6374 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.603351] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 841.603351] env[68285]: value = "task-2891256" [ 841.603351] env[68285]: _type = "Task" [ 841.603351] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.611995] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891256, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.765859] env[68285]: DEBUG oslo_concurrency.lockutils [req-63fbfd54-c202-486e-9b18-5df6331be46a req-c387a6c1-af8e-4a49-ae10-8560422ab8c0 service nova] Releasing lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.772838] env[68285]: DEBUG oslo_concurrency.lockutils [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] Releasing lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.773209] env[68285]: DEBUG nova.compute.manager [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Received event network-vif-plugged-57f2793d-ad69-4e92-9f57-d7c6255ff40d {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 841.773509] env[68285]: DEBUG oslo_concurrency.lockutils [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] Acquiring lock "1c42043d-f8db-4cb9-8147-48d0d32c982b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.773702] env[68285]: DEBUG oslo_concurrency.lockutils [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] Lock "1c42043d-f8db-4cb9-8147-48d0d32c982b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.773903] env[68285]: DEBUG oslo_concurrency.lockutils [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] Lock "1c42043d-f8db-4cb9-8147-48d0d32c982b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.774122] env[68285]: DEBUG nova.compute.manager [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] No waiting events found dispatching network-vif-plugged-57f2793d-ad69-4e92-9f57-d7c6255ff40d {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 841.774594] env[68285]: WARNING nova.compute.manager [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Received unexpected event network-vif-plugged-57f2793d-ad69-4e92-9f57-d7c6255ff40d for instance with vm_state building and task_state spawning. 
[ 841.774594] env[68285]: DEBUG nova.compute.manager [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Received event network-changed-57f2793d-ad69-4e92-9f57-d7c6255ff40d {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 841.774854] env[68285]: DEBUG nova.compute.manager [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Refreshing instance network info cache due to event network-changed-57f2793d-ad69-4e92-9f57-d7c6255ff40d. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 841.775106] env[68285]: DEBUG oslo_concurrency.lockutils [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] Acquiring lock "refresh_cache-1c42043d-f8db-4cb9-8147-48d0d32c982b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.775291] env[68285]: DEBUG oslo_concurrency.lockutils [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] Acquired lock "refresh_cache-1c42043d-f8db-4cb9-8147-48d0d32c982b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.775493] env[68285]: DEBUG nova.network.neutron [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Refreshing network info cache for port 57f2793d-ad69-4e92-9f57-d7c6255ff40d {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 841.780371] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Acquiring lock "65f289bb-6e97-47ad-8531-c06a9cce302f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.780371] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Lock "65f289bb-6e97-47ad-8531-c06a9cce302f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.780700] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Acquiring lock "65f289bb-6e97-47ad-8531-c06a9cce302f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.780700] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Lock "65f289bb-6e97-47ad-8531-c06a9cce302f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.780809] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Lock "65f289bb-6e97-47ad-8531-c06a9cce302f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.783408] env[68285]: INFO nova.compute.manager [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Terminating instance [ 841.877834] env[68285]: DEBUG nova.network.neutron [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Successfully updated port: 1b81cd45-5a3e-4884-af46-ea57107a812b {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 841.879758] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.977635] env[68285]: DEBUG nova.scheduler.client.report [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 841.981516] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.115501] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891256, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069126} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.116346] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 842.117123] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d996b3aa-e01f-42cc-afa7-fdffbb7d2d09 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.142818] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 34aeba05-804e-444c-8e58-69c7721b10b1/34aeba05-804e-444c-8e58-69c7721b10b1.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 842.143161] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db7559a1-db9e-4bb4-a062-33539eeabecd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.168134] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 842.168134] env[68285]: value = "task-2891257" [ 842.168134] env[68285]: _type = "Task" [ 842.168134] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.178866] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891257, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.289858] env[68285]: DEBUG nova.compute.manager [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 842.290427] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 842.291061] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4551f0d-8c4d-45ae-ba29-abedc627a70c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.300429] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 842.300707] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8ca40e8-cab1-4676-bcf3-2ab42c86b826 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.309037] env[68285]: DEBUG oslo_vmware.api [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Waiting for the task: (returnval){ [ 842.309037] env[68285]: value = "task-2891258" [ 842.309037] env[68285]: _type = "Task" [ 842.309037] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.318927] env[68285]: DEBUG oslo_vmware.api [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891258, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.379241] env[68285]: DEBUG nova.network.neutron [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 842.381514] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "refresh_cache-1c42043d-f8db-4cb9-8147-48d0d32c982b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.483360] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.729s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.483943] env[68285]: DEBUG nova.compute.manager [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 842.487095] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.183s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.487342] env[68285]: DEBUG nova.objects.instance [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lazy-loading 'resources' on Instance uuid ef0636f4-3149-44e8-a4a3-62b9ede5dc28 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 842.585589] env[68285]: DEBUG nova.network.neutron [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.680955] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891257, 'name': ReconfigVM_Task, 'duration_secs': 0.400371} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.681307] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 34aeba05-804e-444c-8e58-69c7721b10b1/34aeba05-804e-444c-8e58-69c7721b10b1.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 842.682223] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46a19275-827c-4110-835d-d0662c61a3ca {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.690224] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 842.690224] env[68285]: value = "task-2891259" [ 842.690224] env[68285]: _type = "Task" [ 842.690224] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.699659] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891259, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.821250] env[68285]: DEBUG oslo_vmware.api [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891258, 'name': PowerOffVM_Task, 'duration_secs': 0.402618} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.823039] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 842.823376] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 842.826752] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8485646-578d-4d0b-bfef-1f881e5e749b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.862523] env[68285]: DEBUG nova.compute.manager [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Received event network-vif-deleted-320c995b-dad7-40a2-90c1-1e0f3065e6cb {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 842.862523] env[68285]: DEBUG nova.compute.manager [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Received event network-vif-plugged-1b81cd45-5a3e-4884-af46-ea57107a812b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 842.862523] env[68285]: DEBUG oslo_concurrency.lockutils [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] Acquiring lock "1c42043d-f8db-4cb9-8147-48d0d32c982b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.862523] env[68285]: DEBUG oslo_concurrency.lockutils [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] Lock "1c42043d-f8db-4cb9-8147-48d0d32c982b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.862523] env[68285]: DEBUG oslo_concurrency.lockutils [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] Lock "1c42043d-f8db-4cb9-8147-48d0d32c982b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.862690] env[68285]: DEBUG nova.compute.manager [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] No waiting events found dispatching network-vif-plugged-1b81cd45-5a3e-4884-af46-ea57107a812b {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 842.862690] env[68285]: WARNING nova.compute.manager [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d 
service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Received unexpected event network-vif-plugged-1b81cd45-5a3e-4884-af46-ea57107a812b for instance with vm_state building and task_state spawning. [ 842.862690] env[68285]: DEBUG nova.compute.manager [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Received event network-changed-3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 842.862690] env[68285]: DEBUG nova.compute.manager [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Refreshing instance network info cache due to event network-changed-3b795cd1-99e2-4a06-9607-e71ca33d19ff. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 842.862690] env[68285]: DEBUG oslo_concurrency.lockutils [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] Acquiring lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.862877] env[68285]: DEBUG oslo_concurrency.lockutils [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] Acquired lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.862877] env[68285]: DEBUG nova.network.neutron [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Refreshing network info cache for port 3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.880042] env[68285]: DEBUG nova.compute.manager [req-54981679-6da7-49a1-93aa-5e44f586c549 req-7d1a90c8-21f4-45ff-91e3-3a9d8395b026 service nova] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Received event network-vif-deleted-13f01d57-9418-46a8-90cb-0fa78c30305f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 842.922546] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 842.922546] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 842.922546] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Deleting the datastore file [datastore1] 65f289bb-6e97-47ad-8531-c06a9cce302f {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 842.922546] env[68285]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62db299b-9c19-412c-b1aa-9c2f2e4ee8d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.932060] env[68285]: DEBUG oslo_vmware.api [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Waiting for the task: (returnval){ [ 842.932060] env[68285]: value = "task-2891261" [ 842.932060] env[68285]: _type = "Task" [ 842.932060] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.942270] env[68285]: DEBUG oslo_vmware.api [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891261, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.990789] env[68285]: DEBUG nova.compute.utils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 842.999024] env[68285]: DEBUG nova.compute.manager [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 842.999024] env[68285]: DEBUG nova.network.neutron [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 843.083804] env[68285]: DEBUG nova.policy [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7840556165649e3be7ea35a7fb7e968', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bde4f3e3864d4a4c9b0df9edcaf258e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 843.090322] env[68285]: DEBUG oslo_concurrency.lockutils [req-0f547afd-5258-4f07-bd37-ba3e09eb2dba req-ccbf4801-c179-42c2-a585-543f4ce1a67e service nova] Releasing lock "refresh_cache-1c42043d-f8db-4cb9-8147-48d0d32c982b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.090695] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquired lock "refresh_cache-1c42043d-f8db-4cb9-8147-48d0d32c982b" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.090847] env[68285]: DEBUG nova.network.neutron [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.201691] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891259, 'name': Rename_Task, 'duration_secs': 0.166807} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.204477] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 843.204936] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03141229-e6ad-4bbb-b6b9-105a4d258362 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.216551] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 843.216551] env[68285]: value = "task-2891262" [ 843.216551] env[68285]: _type = "Task" [ 843.216551] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.228816] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891262, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.330637] env[68285]: DEBUG oslo_concurrency.lockutils [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.450127] env[68285]: DEBUG oslo_vmware.api [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Task: {'id': task-2891261, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173258} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.452444] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 843.452444] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 843.452444] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 843.452444] env[68285]: INFO nova.compute.manager [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Took 1.16 seconds to destroy the instance on the hypervisor. [ 843.452444] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 843.452738] env[68285]: DEBUG nova.compute.manager [-] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 843.452738] env[68285]: DEBUG nova.network.neutron [-] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 843.497559] env[68285]: DEBUG nova.compute.manager [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 843.588301] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4eb1b8-7978-4481-917c-dad9fea87310 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.612068] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a239bc55-9a19-437c-b01b-b6b82e194491 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.651279] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc48d860-af03-42fa-82bf-8026838347d7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.660453] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13790b1-b6cf-4a47-9eff-e3dcee1644e4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.678620] env[68285]: DEBUG nova.compute.provider_tree [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.726664] env[68285]: DEBUG oslo_vmware.api [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891262, 'name': PowerOnVM_Task, 'duration_secs': 0.488154} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.726927] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 843.727194] env[68285]: INFO nova.compute.manager [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Took 7.02 seconds to spawn the instance on the hypervisor. [ 843.727381] env[68285]: DEBUG nova.compute.manager [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 843.728264] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4755b659-127f-4997-8db8-809a9587e78c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.799264] env[68285]: DEBUG nova.network.neutron [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Updated VIF entry in instance network info cache for port 3b795cd1-99e2-4a06-9607-e71ca33d19ff. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 843.799815] env[68285]: DEBUG nova.network.neutron [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Updating instance_info_cache with network_info: [{"id": "3b795cd1-99e2-4a06-9607-e71ca33d19ff", "address": "fa:16:3e:75:c1:88", "network": {"id": "23f2c3a4-b609-4f2b-82ea-30d2f16df8e6", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2087738678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53261bb9432948b58692227101a4717b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2c019b6-3ef3-4c8f-95bd-edede2c554a9", "external-id": "nsx-vlan-transportzone-364", "segmentation_id": 364, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b795cd1-99", "ovs_interfaceid": "3b795cd1-99e2-4a06-9607-e71ca33d19ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.896493] env[68285]: DEBUG nova.network.neutron [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 844.011963] env[68285]: DEBUG nova.network.neutron [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Successfully created port: 90718536-f982-4e0c-8bc8-5ce84e9f0a55 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 844.186072] env[68285]: DEBUG nova.scheduler.client.report [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 844.250622] env[68285]: INFO nova.compute.manager [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Took 32.65 seconds to build instance. 
[ 844.303158] env[68285]: DEBUG oslo_concurrency.lockutils [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] Releasing lock "refresh_cache-11de7da5-1d73-4536-b2a1-f7dbbdec14b8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.303447] env[68285]: DEBUG nova.compute.manager [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Received event network-changed-1b81cd45-5a3e-4884-af46-ea57107a812b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 844.303615] env[68285]: DEBUG nova.compute.manager [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Refreshing instance network info cache due to event network-changed-1b81cd45-5a3e-4884-af46-ea57107a812b. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 844.303796] env[68285]: DEBUG oslo_concurrency.lockutils [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] Acquiring lock "refresh_cache-1c42043d-f8db-4cb9-8147-48d0d32c982b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.346577] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.346886] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.504889] env[68285]: DEBUG nova.network.neutron [-] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.516403] env[68285]: DEBUG nova.compute.manager [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 844.549842] env[68285]: DEBUG nova.virt.hardware [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 844.550105] env[68285]: DEBUG nova.virt.hardware [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.550267] env[68285]: DEBUG nova.virt.hardware [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 844.550445] env[68285]: DEBUG nova.virt.hardware [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.550618] env[68285]: DEBUG nova.virt.hardware [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 844.550791] env[68285]: DEBUG nova.virt.hardware [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 844.551008] env[68285]: DEBUG nova.virt.hardware [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 844.551208] env[68285]: DEBUG nova.virt.hardware [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 844.551392] env[68285]: DEBUG 
nova.virt.hardware [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 844.551615] env[68285]: DEBUG nova.virt.hardware [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 844.551821] env[68285]: DEBUG nova.virt.hardware [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 844.552764] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e0f0c6-c7b6-4892-a178-97abd2a36543 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.563226] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b892a42-e4c0-4e36-acea-73320cbbfb91 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.692640] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.204s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.693743] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.320s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.695069] env[68285]: INFO nova.compute.claims [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 844.730736] env[68285]: INFO nova.scheduler.client.report [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Deleted allocations for instance ef0636f4-3149-44e8-a4a3-62b9ede5dc28 [ 844.757480] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9e3f70fc-e497-45a1-bf55-9179a252c02f tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "34aeba05-804e-444c-8e58-69c7721b10b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.380s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.780579] env[68285]: DEBUG nova.network.neutron [None 
req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Updating instance_info_cache with network_info: [{"id": "bb64687c-e2d8-4813-9c8a-01b6904040e3", "address": "fa:16:3e:4c:d3:36", "network": {"id": "f222a190-c77e-4c09-8469-38f8774db1ae", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1029505673", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb64687c-e2", "ovs_interfaceid": "bb64687c-e2d8-4813-9c8a-01b6904040e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "57f2793d-ad69-4e92-9f57-d7c6255ff40d", "address": "fa:16:3e:e5:fb:4f", "network": {"id": "2148d25f-40d0-4388-b5d4-7baa98dbc1e8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1346940619", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57f2793d-ad", "ovs_interfaceid": "57f2793d-ad69-4e92-9f57-d7c6255ff40d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1b81cd45-5a3e-4884-af46-ea57107a812b", "address": "fa:16:3e:1c:bc:2f", "network": {"id": "f222a190-c77e-4c09-8469-38f8774db1ae", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1029505673", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.248", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b81cd45-5a", "ovs_interfaceid": 
"1b81cd45-5a3e-4884-af46-ea57107a812b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.909228] env[68285]: DEBUG nova.compute.manager [req-2aaec71b-a036-4eef-bbec-3bcacc003855 req-98472199-dfc3-4e9a-a2ef-70de41bb8601 service nova] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Received event network-vif-deleted-bc6618d2-82b3-4803-802e-d16377e5423f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 845.008645] env[68285]: INFO nova.compute.manager [-] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Took 1.56 seconds to deallocate network for instance. [ 845.241965] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e834cb5-9a16-4f2c-a188-9e381445233e tempest-ImagesOneServerTestJSON-234777968 tempest-ImagesOneServerTestJSON-234777968-project-member] Lock "ef0636f4-3149-44e8-a4a3-62b9ede5dc28" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.851s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.262674] env[68285]: DEBUG nova.compute.manager [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 845.282277] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Releasing lock "refresh_cache-1c42043d-f8db-4cb9-8147-48d0d32c982b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.282680] env[68285]: DEBUG nova.compute.manager [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Instance network_info: |[{"id": "bb64687c-e2d8-4813-9c8a-01b6904040e3", "address": "fa:16:3e:4c:d3:36", "network": {"id": "f222a190-c77e-4c09-8469-38f8774db1ae", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1029505673", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb64687c-e2", "ovs_interfaceid": "bb64687c-e2d8-4813-9c8a-01b6904040e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "57f2793d-ad69-4e92-9f57-d7c6255ff40d", "address": "fa:16:3e:e5:fb:4f", "network": 
{"id": "2148d25f-40d0-4388-b5d4-7baa98dbc1e8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1346940619", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57f2793d-ad", "ovs_interfaceid": "57f2793d-ad69-4e92-9f57-d7c6255ff40d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1b81cd45-5a3e-4884-af46-ea57107a812b", "address": "fa:16:3e:1c:bc:2f", "network": {"id": "f222a190-c77e-4c09-8469-38f8774db1ae", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1029505673", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.248", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b81cd45-5a", "ovs_interfaceid": "1b81cd45-5a3e-4884-af46-ea57107a812b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 845.283752] env[68285]: DEBUG oslo_concurrency.lockutils [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] Acquired lock "refresh_cache-1c42043d-f8db-4cb9-8147-48d0d32c982b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.283948] env[68285]: DEBUG nova.network.neutron [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Refreshing network info cache for port 1b81cd45-5a3e-4884-af46-ea57107a812b {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 845.285137] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:d3:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ce17e10e-2fb0-4191-afee-e2b89fa15074', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb64687c-e2d8-4813-9c8a-01b6904040e3', 
'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:fb:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bfae3ef8-cae7-455d-8632-ba93e1671625', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57f2793d-ad69-4e92-9f57-d7c6255ff40d', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:bc:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ce17e10e-2fb0-4191-afee-e2b89fa15074', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b81cd45-5a3e-4884-af46-ea57107a812b', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 845.296503] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Creating folder: Project (ae4430b997b4480abbf2c5fce71cca04). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 845.300250] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a08bc99f-9f3b-4963-8a6c-c601c886f1d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.314102] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Created folder: Project (ae4430b997b4480abbf2c5fce71cca04) in parent group-v580775. [ 845.314359] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Creating folder: Instances. Parent ref: group-v580855. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 845.314624] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07872d29-9b1f-4577-891e-acd7e82d78a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.326371] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Created folder: Instances in parent group-v580855. [ 845.326660] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 845.326881] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 845.327434] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fbebfc36-a985-4159-b942-aa389fb51915 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.353054] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 845.353054] env[68285]: value = "task-2891265" [ 845.353054] env[68285]: _type = "Task" [ 845.353054] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.358138] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Acquiring lock "c8784827-a928-439d-abdf-d82b62a61152" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.358138] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Lock "c8784827-a928-439d-abdf-d82b62a61152" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.358138] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Acquiring lock "c8784827-a928-439d-abdf-d82b62a61152-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.358138] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Lock "c8784827-a928-439d-abdf-d82b62a61152-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.358138] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Lock "c8784827-a928-439d-abdf-d82b62a61152-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.360095] env[68285]: INFO nova.compute.manager [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Terminating instance [ 845.365601] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891265, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.515211] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.719569] env[68285]: DEBUG nova.compute.manager [req-03b67ab8-6fc8-4844-933a-ca71e094a742 req-e8d52b3b-c388-49c1-a7cd-0784a2c7b93a service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Received event network-changed-b67172eb-4f98-4870-a433-22f6e238cbf4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 845.719569] env[68285]: DEBUG nova.compute.manager [req-03b67ab8-6fc8-4844-933a-ca71e094a742 req-e8d52b3b-c388-49c1-a7cd-0784a2c7b93a service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Refreshing instance network info cache due to event network-changed-b67172eb-4f98-4870-a433-22f6e238cbf4. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 845.719774] env[68285]: DEBUG oslo_concurrency.lockutils [req-03b67ab8-6fc8-4844-933a-ca71e094a742 req-e8d52b3b-c388-49c1-a7cd-0784a2c7b93a service nova] Acquiring lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.719930] env[68285]: DEBUG oslo_concurrency.lockutils [req-03b67ab8-6fc8-4844-933a-ca71e094a742 req-e8d52b3b-c388-49c1-a7cd-0784a2c7b93a service nova] Acquired lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.720083] env[68285]: DEBUG nova.network.neutron [req-03b67ab8-6fc8-4844-933a-ca71e094a742 req-e8d52b3b-c388-49c1-a7cd-0784a2c7b93a service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Refreshing network info cache for port b67172eb-4f98-4870-a433-22f6e238cbf4 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 845.744533] env[68285]: DEBUG nova.network.neutron [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Updated VIF entry in instance network info cache for port 1b81cd45-5a3e-4884-af46-ea57107a812b. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 845.745052] env[68285]: DEBUG nova.network.neutron [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Updating instance_info_cache with network_info: [{"id": "bb64687c-e2d8-4813-9c8a-01b6904040e3", "address": "fa:16:3e:4c:d3:36", "network": {"id": "f222a190-c77e-4c09-8469-38f8774db1ae", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1029505673", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb64687c-e2", "ovs_interfaceid": "bb64687c-e2d8-4813-9c8a-01b6904040e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "57f2793d-ad69-4e92-9f57-d7c6255ff40d", "address": "fa:16:3e:e5:fb:4f", "network": {"id": "2148d25f-40d0-4388-b5d4-7baa98dbc1e8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1346940619", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57f2793d-ad", "ovs_interfaceid": "57f2793d-ad69-4e92-9f57-d7c6255ff40d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1b81cd45-5a3e-4884-af46-ea57107a812b", "address": "fa:16:3e:1c:bc:2f", "network": {"id": "f222a190-c77e-4c09-8469-38f8774db1ae", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1029505673", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.248", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b81cd45-5a", "ovs_interfaceid": "1b81cd45-5a3e-4884-af46-ea57107a812b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.790394] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.867367] env[68285]: DEBUG nova.compute.manager [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 845.867789] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 845.868391] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891265, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.871611] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf17c037-fa8d-445a-adb2-80f56f46ed18 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.882401] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 845.882687] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88f7b6b3-eff2-4d69-bc3d-7496e5690983 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.895440] env[68285]: DEBUG oslo_vmware.api [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Waiting for the task: (returnval){ [ 845.895440] env[68285]: value = "task-2891266" [ 845.895440] env[68285]: _type = "Task" [ 845.895440] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.906688] env[68285]: DEBUG oslo_vmware.api [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891266, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.034780] env[68285]: DEBUG nova.network.neutron [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Successfully updated port: 90718536-f982-4e0c-8bc8-5ce84e9f0a55 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 846.248278] env[68285]: DEBUG oslo_concurrency.lockutils [req-f9f21010-20b4-4807-968c-f86220445010 req-63a3ad8b-b479-44d6-b095-921f18129b6d service nova] Releasing lock "refresh_cache-1c42043d-f8db-4cb9-8147-48d0d32c982b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.345607] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0a9446-07f0-45f4-aadc-ca9c2552099d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.354091] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ada7d7-57a5-45bb-a5b8-1250d1a54e80 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.366867] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891265, 'name': CreateVM_Task, 'duration_secs': 0.958586} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.392985] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 846.396587] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.396755] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.397087] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 846.397866] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d02a7d-d67e-4863-8600-2f42e892a6b8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.400465] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96644d25-6bd7-453e-82ee-3badcf85157f {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.409714] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 846.409714] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529614b4-eb47-ba16-172d-1076496d82cb" [ 846.409714] env[68285]: _type = "Task" [ 846.409714] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.415083] env[68285]: DEBUG oslo_vmware.api [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891266, 'name': PowerOffVM_Task, 'duration_secs': 0.248773} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.418506] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 846.418748] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 846.420088] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2751ffcc-72e8-4b0b-bcc6-f1093cb934ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.424298] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9bea9dd-7b19-49bc-aee0-9893f3ecd0b8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.444536] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529614b4-eb47-ba16-172d-1076496d82cb, 'name': SearchDatastore_Task, 'duration_secs': 0.012613} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.445121] env[68285]: DEBUG nova.compute.provider_tree [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.448430] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.448430] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 846.448430] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.448430] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.448430] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 846.448430] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9e9bdb7-fb50-4fdb-997b-7df0e860527d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.463049] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.463264] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 846.464017] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7ef01ee-0df9-4ea3-96e7-735148d0250d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.473488] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 846.473488] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52da34b0-0fd5-8e22-99ae-a558c17b3876" [ 846.473488] env[68285]: _type = "Task" [ 846.473488] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.483418] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52da34b0-0fd5-8e22-99ae-a558c17b3876, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.493944] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 846.494200] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 846.494391] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Deleting the datastore file [datastore2] c8784827-a928-439d-abdf-d82b62a61152 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 846.494655] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d31602cd-9234-487a-9b76-4e0c8001ebff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.503049] env[68285]: DEBUG oslo_vmware.api [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Waiting for the task: (returnval){ [ 846.503049] env[68285]: value = "task-2891268" [ 846.503049] env[68285]: _type = "Task" [ 846.503049] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.514053] env[68285]: DEBUG oslo_vmware.api [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891268, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.541583] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Acquiring lock "refresh_cache-a2a7590d-c415-4955-8a25-4b1411449557" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.541754] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Acquired lock "refresh_cache-a2a7590d-c415-4955-8a25-4b1411449557" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.541886] env[68285]: DEBUG nova.network.neutron [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 846.948732] env[68285]: DEBUG nova.scheduler.client.report [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 846.988591] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52da34b0-0fd5-8e22-99ae-a558c17b3876, 'name': SearchDatastore_Task, 'duration_secs': 0.012072} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.992811] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b29f0ccd-8b8a-438b-a738-bb282a523ce1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.001060] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 847.001060] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]520c63d9-524d-3226-dcaf-24bcf7090d5b" [ 847.001060] env[68285]: _type = "Task" [ 847.001060] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.008349] env[68285]: DEBUG nova.network.neutron [req-03b67ab8-6fc8-4844-933a-ca71e094a742 req-e8d52b3b-c388-49c1-a7cd-0784a2c7b93a service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updated VIF entry in instance network info cache for port b67172eb-4f98-4870-a433-22f6e238cbf4. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 847.008604] env[68285]: DEBUG nova.network.neutron [req-03b67ab8-6fc8-4844-933a-ca71e094a742 req-e8d52b3b-c388-49c1-a7cd-0784a2c7b93a service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance_info_cache with network_info: [{"id": "b67172eb-4f98-4870-a433-22f6e238cbf4", "address": "fa:16:3e:69:33:45", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67172eb-4f", "ovs_interfaceid": "b67172eb-4f98-4870-a433-22f6e238cbf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.022789] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520c63d9-524d-3226-dcaf-24bcf7090d5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.026676] env[68285]: DEBUG oslo_vmware.api [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891268, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.108032] env[68285]: DEBUG nova.network.neutron [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 847.462196] env[68285]: DEBUG nova.network.neutron [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Updating instance_info_cache with network_info: [{"id": "90718536-f982-4e0c-8bc8-5ce84e9f0a55", "address": "fa:16:3e:51:21:55", "network": {"id": "5f8bb7cf-027c-4e6d-aa8c-a3dae87f45f6", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1414159614-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bde4f3e3864d4a4c9b0df9edcaf258e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90718536-f9", "ovs_interfaceid": "90718536-f982-4e0c-8bc8-5ce84e9f0a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.462196] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.768s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.464065] env[68285]: DEBUG nova.compute.manager [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 847.470689] env[68285]: DEBUG oslo_concurrency.lockutils [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.454s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.470969] env[68285]: DEBUG nova.objects.instance [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lazy-loading 'resources' on Instance uuid 81fe4854-1094-4c42-9df5-05325d961146 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 847.512221] env[68285]: DEBUG oslo_concurrency.lockutils [req-03b67ab8-6fc8-4844-933a-ca71e094a742 req-e8d52b3b-c388-49c1-a7cd-0784a2c7b93a service nova] Releasing lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.512728] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520c63d9-524d-3226-dcaf-24bcf7090d5b, 'name': SearchDatastore_Task, 'duration_secs': 0.02596} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.517718] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.517718] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 1c42043d-f8db-4cb9-8147-48d0d32c982b/1c42043d-f8db-4cb9-8147-48d0d32c982b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 847.517718] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a0afe05-35c5-47a0-8556-07d04f984a87 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.525415] env[68285]: DEBUG oslo_vmware.api [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Task: {'id': task-2891268, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.523489} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.528195] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 847.528195] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 847.528195] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 847.528195] env[68285]: INFO nova.compute.manager [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] [instance: c8784827-a928-439d-abdf-d82b62a61152] Took 1.66 seconds to destroy the instance on the hypervisor. [ 847.528195] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 847.528195] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 847.528195] env[68285]: value = "task-2891269" [ 847.528195] env[68285]: _type = "Task" [ 847.528195] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.528195] env[68285]: DEBUG nova.compute.manager [-] [instance: c8784827-a928-439d-abdf-d82b62a61152] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 847.528195] env[68285]: DEBUG nova.network.neutron [-] [instance: c8784827-a928-439d-abdf-d82b62a61152] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 847.539518] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891269, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.753342] env[68285]: DEBUG nova.compute.manager [req-defc788b-b360-4ad2-a5db-dbe6f89d8a7f req-efeee898-3184-47d9-bbbd-db629ad5324b service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Received event network-vif-plugged-90718536-f982-4e0c-8bc8-5ce84e9f0a55 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 847.753342] env[68285]: DEBUG oslo_concurrency.lockutils [req-defc788b-b360-4ad2-a5db-dbe6f89d8a7f req-efeee898-3184-47d9-bbbd-db629ad5324b service nova] Acquiring lock "a2a7590d-c415-4955-8a25-4b1411449557-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.753342] env[68285]: DEBUG oslo_concurrency.lockutils [req-defc788b-b360-4ad2-a5db-dbe6f89d8a7f req-efeee898-3184-47d9-bbbd-db629ad5324b service nova] Lock "a2a7590d-c415-4955-8a25-4b1411449557-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.753577] env[68285]: DEBUG oslo_concurrency.lockutils [req-defc788b-b360-4ad2-a5db-dbe6f89d8a7f req-efeee898-3184-47d9-bbbd-db629ad5324b service nova] Lock "a2a7590d-c415-4955-8a25-4b1411449557-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.753730] env[68285]: DEBUG nova.compute.manager [req-defc788b-b360-4ad2-a5db-dbe6f89d8a7f req-efeee898-3184-47d9-bbbd-db629ad5324b service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] No waiting events found dispatching network-vif-plugged-90718536-f982-4e0c-8bc8-5ce84e9f0a55 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 847.753891] env[68285]: WARNING nova.compute.manager [req-defc788b-b360-4ad2-a5db-dbe6f89d8a7f req-efeee898-3184-47d9-bbbd-db629ad5324b service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Received unexpected event network-vif-plugged-90718536-f982-4e0c-8bc8-5ce84e9f0a55 for instance with vm_state building and task_state spawning. [ 847.754254] env[68285]: DEBUG nova.compute.manager [req-defc788b-b360-4ad2-a5db-dbe6f89d8a7f req-efeee898-3184-47d9-bbbd-db629ad5324b service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Received event network-changed-90718536-f982-4e0c-8bc8-5ce84e9f0a55 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 847.754254] env[68285]: DEBUG nova.compute.manager [req-defc788b-b360-4ad2-a5db-dbe6f89d8a7f req-efeee898-3184-47d9-bbbd-db629ad5324b service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Refreshing instance network info cache due to event network-changed-90718536-f982-4e0c-8bc8-5ce84e9f0a55. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 847.754453] env[68285]: DEBUG oslo_concurrency.lockutils [req-defc788b-b360-4ad2-a5db-dbe6f89d8a7f req-efeee898-3184-47d9-bbbd-db629ad5324b service nova] Acquiring lock "refresh_cache-a2a7590d-c415-4955-8a25-4b1411449557" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.970175] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Releasing lock "refresh_cache-a2a7590d-c415-4955-8a25-4b1411449557" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.970512] env[68285]: DEBUG nova.compute.manager [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Instance network_info: |[{"id": "90718536-f982-4e0c-8bc8-5ce84e9f0a55", "address": "fa:16:3e:51:21:55", "network": {"id": "5f8bb7cf-027c-4e6d-aa8c-a3dae87f45f6", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1414159614-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bde4f3e3864d4a4c9b0df9edcaf258e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90718536-f9", "ovs_interfaceid": "90718536-f982-4e0c-8bc8-5ce84e9f0a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 847.970811] env[68285]: DEBUG oslo_concurrency.lockutils [req-defc788b-b360-4ad2-a5db-dbe6f89d8a7f req-efeee898-3184-47d9-bbbd-db629ad5324b service nova] Acquired lock "refresh_cache-a2a7590d-c415-4955-8a25-4b1411449557" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.970984] env[68285]: DEBUG nova.network.neutron [req-defc788b-b360-4ad2-a5db-dbe6f89d8a7f req-efeee898-3184-47d9-bbbd-db629ad5324b service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Refreshing network info cache for port 90718536-f982-4e0c-8bc8-5ce84e9f0a55 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.972326] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:21:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0a76279-3c11-4bef-b124-2a2ee13fa377', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'90718536-f982-4e0c-8bc8-5ce84e9f0a55', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 847.981987] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Creating folder: Project (bde4f3e3864d4a4c9b0df9edcaf258e7). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 847.985677] env[68285]: DEBUG nova.compute.utils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 847.989488] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aaf1c202-91af-4bcb-8d5b-b86e5c429ddd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.992147] env[68285]: DEBUG nova.compute.manager [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 847.992409] env[68285]: DEBUG nova.network.neutron [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 848.008087] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Created folder: Project (bde4f3e3864d4a4c9b0df9edcaf258e7) in parent group-v580775. [ 848.008429] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Creating folder: Instances. Parent ref: group-v580858. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 848.009142] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59cd44e9-42f4-4a26-b0be-94a95f33d0c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.026339] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Created folder: Instances in parent group-v580858. [ 848.026638] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 848.026914] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 848.027204] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-879a9ce2-1792-4e97-9996-1c198c48a8f7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.058745] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891269, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.059240] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 848.059240] env[68285]: value = "task-2891272" [ 848.059240] env[68285]: _type = "Task" [ 848.059240] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.074162] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891272, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.137978] env[68285]: DEBUG nova.policy [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '70340a4cc3df49ff971f299e439a1581', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98d03ce152e74cec8910b12d34ad8ba6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 848.442388] env[68285]: DEBUG nova.network.neutron [-] [instance: c8784827-a928-439d-abdf-d82b62a61152] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.495110] env[68285]: DEBUG nova.compute.manager [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 848.503264] env[68285]: DEBUG nova.network.neutron [req-defc788b-b360-4ad2-a5db-dbe6f89d8a7f req-efeee898-3184-47d9-bbbd-db629ad5324b service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Updated VIF entry in instance network info cache for port 90718536-f982-4e0c-8bc8-5ce84e9f0a55. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 848.503675] env[68285]: DEBUG nova.network.neutron [req-defc788b-b360-4ad2-a5db-dbe6f89d8a7f req-efeee898-3184-47d9-bbbd-db629ad5324b service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Updating instance_info_cache with network_info: [{"id": "90718536-f982-4e0c-8bc8-5ce84e9f0a55", "address": "fa:16:3e:51:21:55", "network": {"id": "5f8bb7cf-027c-4e6d-aa8c-a3dae87f45f6", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1414159614-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bde4f3e3864d4a4c9b0df9edcaf258e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90718536-f9", "ovs_interfaceid": "90718536-f982-4e0c-8bc8-5ce84e9f0a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.560711] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891269, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.66302} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.566482] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 1c42043d-f8db-4cb9-8147-48d0d32c982b/1c42043d-f8db-4cb9-8147-48d0d32c982b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 848.577612] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 848.577612] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02a089e1-e7e0-4b0b-84bf-9ac629d7c7d3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.577612] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891272, 'name': CreateVM_Task, 'duration_secs': 0.346343} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.577612] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 848.577612] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.577612] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.580840] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 848.580840] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a06c4f97-8832-487f-853a-0b4fe7c1fd6f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.581365] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 848.581365] env[68285]: value = "task-2891273" [ 848.581365] env[68285]: _type = "Task" [ 848.581365] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.590992] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Waiting for the task: (returnval){ [ 848.590992] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d0f960-7930-0a25-05ef-f0e8a990186c" [ 848.590992] env[68285]: _type = "Task" [ 848.590992] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.599273] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891273, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.608858] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d0f960-7930-0a25-05ef-f0e8a990186c, 'name': SearchDatastore_Task, 'duration_secs': 0.01431} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.609162] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.609432] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.609651] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.609755] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.609927] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 848.610228] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b1de1e2-a3ca-4682-98c2-0ba59aa1f4a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.620718] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 848.621021] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 848.621667] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-844eeedf-fbe5-4ea9-bfe1-483b375750bb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.630454] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Waiting for the task: (returnval){ [ 848.630454] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5231428e-268d-b5ea-95f3-521c585e6aa1" [ 848.630454] env[68285]: _type = "Task" [ 848.630454] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.642684] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5231428e-268d-b5ea-95f3-521c585e6aa1, 'name': SearchDatastore_Task, 'duration_secs': 0.010758} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.643524] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d384108c-c1ca-4e9e-bd28-ba4738218c53 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.650090] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Waiting for the task: (returnval){ [ 848.650090] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52df9764-6681-15ed-f8d9-1635a7f1e6ae" [ 848.650090] env[68285]: _type = "Task" [ 848.650090] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.663513] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52df9764-6681-15ed-f8d9-1635a7f1e6ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.715089] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3a7e57-ef74-4b8d-9b50-1ea34c63038e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.723631] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de89410-6080-4671-a3f9-44594828c0f9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.756822] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d6a83a-b951-4bf7-8848-8722b7d1367b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.768297] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205f91e8-dfef-4b2c-a753-f6b573b4f1e6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.775705] env[68285]: DEBUG nova.network.neutron [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Successfully created port: 25885f8e-e0d5-491d-a099-409ae53d20c1 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 848.787898] env[68285]: DEBUG nova.compute.provider_tree [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 848.946110] env[68285]: INFO nova.compute.manager [-] [instance: c8784827-a928-439d-abdf-d82b62a61152] Took 1.42 seconds to deallocate network for instance. [ 849.006565] env[68285]: DEBUG oslo_concurrency.lockutils [req-defc788b-b360-4ad2-a5db-dbe6f89d8a7f req-efeee898-3184-47d9-bbbd-db629ad5324b service nova] Releasing lock "refresh_cache-a2a7590d-c415-4955-8a25-4b1411449557" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.095250] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891273, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079607} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.095537] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 849.100018] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2985b1-b021-4108-a6ff-1ca61a235f98 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.127366] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 1c42043d-f8db-4cb9-8147-48d0d32c982b/1c42043d-f8db-4cb9-8147-48d0d32c982b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 849.127710] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6703c02-3f4a-4bdf-a780-cfd8dd2673a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.151450] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 849.151450] env[68285]: value = "task-2891274" [ 849.151450] env[68285]: _type = "Task" [ 849.151450] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.164148] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891274, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.167303] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52df9764-6681-15ed-f8d9-1635a7f1e6ae, 'name': SearchDatastore_Task, 'duration_secs': 0.013527} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.167524] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.167779] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] a2a7590d-c415-4955-8a25-4b1411449557/a2a7590d-c415-4955-8a25-4b1411449557.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 849.168675] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28429986-fb78-4ab0-bc0b-4fc278c166a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.176237] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Waiting for the task: (returnval){ [ 849.176237] env[68285]: value = "task-2891275" [ 849.176237] env[68285]: _type = "Task" [ 849.176237] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.187545] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891275, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.324447] env[68285]: ERROR nova.scheduler.client.report [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [req-1869e611-bffd-48f8-8a1c-7f680740ce47] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1869e611-bffd-48f8-8a1c-7f680740ce47"}]} [ 849.342959] env[68285]: DEBUG nova.scheduler.client.report [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 849.357398] env[68285]: DEBUG nova.scheduler.client.report [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 849.357554] env[68285]: DEBUG nova.compute.provider_tree [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 849.372350] env[68285]: DEBUG nova.scheduler.client.report [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 849.398714] env[68285]: DEBUG nova.scheduler.client.report [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 849.454376] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.503940] env[68285]: DEBUG nova.compute.manager [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 849.543453] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "5266817c-ce3b-4c96-a3bd-32b631c29b81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.543453] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "5266817c-ce3b-4c96-a3bd-32b631c29b81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.548250] env[68285]: DEBUG nova.virt.hardware [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 849.548617] env[68285]: DEBUG nova.virt.hardware [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 849.548768] env[68285]: DEBUG nova.virt.hardware [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 849.548940] env[68285]: DEBUG nova.virt.hardware [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 849.549388] env[68285]: DEBUG nova.virt.hardware [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Image pref 
0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 849.549578] env[68285]: DEBUG nova.virt.hardware [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 849.549805] env[68285]: DEBUG nova.virt.hardware [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 849.549941] env[68285]: DEBUG nova.virt.hardware [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 849.551877] env[68285]: DEBUG nova.virt.hardware [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 849.552058] env[68285]: DEBUG nova.virt.hardware [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 849.552561] env[68285]: DEBUG nova.virt.hardware [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 849.556801] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a835443-dc4b-41df-8be0-a1ef5fb4d413 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.571543] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a31af97-ae6e-4629-8d1c-ee2d0c4e93e6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.670713] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891274, 'name': ReconfigVM_Task, 'duration_secs': 0.341397} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.671047] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 1c42043d-f8db-4cb9-8147-48d0d32c982b/1c42043d-f8db-4cb9-8147-48d0d32c982b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 849.671753] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6234b571-90f3-450a-b3a6-dbd072fa88a3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.684757] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 849.684757] env[68285]: value = "task-2891276" [ 849.684757] env[68285]: _type = "Task" [ 849.684757] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.693527] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891275, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.702838] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891276, 'name': Rename_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.829714] env[68285]: DEBUG nova.compute.manager [req-052d25cc-b59f-4910-b297-5e1e94f97074 req-3de46f50-da48-4aba-95dc-c4edd4c265d8 service nova] [instance: c8784827-a928-439d-abdf-d82b62a61152] Received event network-vif-deleted-24fc61fe-8e63-4459-8435-25d4c23e10f2 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 850.081619] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b173afd-8aea-46d4-b04f-6aa7641fca87 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.091538] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8341ff2b-e24e-4a5c-8dce-500f48e3208f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.128512] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789ab9d2-99de-4a35-af86-74b0594358c0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.135092] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40ae112-1f83-48ad-81c9-e6b465b40baa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.150283] env[68285]: DEBUG nova.compute.provider_tree [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 850.192415] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891275, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.986284} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.193146] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] a2a7590d-c415-4955-8a25-4b1411449557/a2a7590d-c415-4955-8a25-4b1411449557.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 850.193404] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 850.193658] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3739715d-0e80-4a5c-8e0d-0b82a352c255 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.199409] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891276, 'name': Rename_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.206864] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Waiting for the task: (returnval){ [ 850.206864] env[68285]: value = "task-2891277" [ 850.206864] env[68285]: _type = "Task" [ 850.206864] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.218731] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891277, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.678331] env[68285]: DEBUG nova.network.neutron [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Successfully updated port: 25885f8e-e0d5-491d-a099-409ae53d20c1 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 850.698669] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891276, 'name': Rename_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.705125] env[68285]: DEBUG nova.scheduler.client.report [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 55 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 850.705357] env[68285]: DEBUG nova.compute.provider_tree [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 55 to 56 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 850.705550] env[68285]: DEBUG nova.compute.provider_tree [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 850.721024] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891277, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070603} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.721024] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 850.721024] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4cc3c02-c2fa-4a66-8a42-f42b5cb37376 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.747023] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] a2a7590d-c415-4955-8a25-4b1411449557/a2a7590d-c415-4955-8a25-4b1411449557.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 850.747023] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a588b69a-46d0-429f-ba5c-dcc720203ccc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.772618] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Waiting for the task: (returnval){ [ 850.772618] env[68285]: value = "task-2891278" [ 850.772618] env[68285]: _type = "Task" [ 850.772618] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.784289] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891278, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.181432] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "refresh_cache-1b9dd0e2-781f-43d7-a66e-e718a0972c78" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.183253] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "refresh_cache-1b9dd0e2-781f-43d7-a66e-e718a0972c78" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 851.183253] env[68285]: DEBUG nova.network.neutron [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 851.199413] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891276, 'name': Rename_Task, 'duration_secs': 1.186985} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.199693] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 851.199957] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1f9dbe6-a44a-4612-a601-627a3b2f5aef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.207733] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 851.207733] env[68285]: value = "task-2891279" [ 851.207733] env[68285]: _type = "Task" [ 851.207733] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.211888] env[68285]: DEBUG oslo_concurrency.lockutils [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.741s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.213918] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.668s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.219173] env[68285]: INFO nova.compute.claims [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 851.225446] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891279, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.238284] env[68285]: INFO nova.scheduler.client.report [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Deleted allocations for instance 81fe4854-1094-4c42-9df5-05325d961146 [ 851.284215] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891278, 'name': ReconfigVM_Task, 'duration_secs': 0.379365} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.284215] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Reconfigured VM instance instance-0000001f to attach disk [datastore2] a2a7590d-c415-4955-8a25-4b1411449557/a2a7590d-c415-4955-8a25-4b1411449557.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 851.284798] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d38e792c-7c53-4e33-a070-f6f4c2c25365 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.295845] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Waiting for the task: (returnval){ [ 851.295845] env[68285]: value = "task-2891280" [ 851.295845] env[68285]: _type = "Task" [ 851.295845] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.309427] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891280, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.388816] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "d1b5abfa-fd38-4d17-b75f-5036af841d24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.389325] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "d1b5abfa-fd38-4d17-b75f-5036af841d24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.725547] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891279, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.729636] env[68285]: DEBUG nova.network.neutron [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 851.749435] env[68285]: DEBUG oslo_concurrency.lockutils [None req-780b1495-7bfb-47fc-a1d3-4c9a4eb0aa27 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "81fe4854-1094-4c42-9df5-05325d961146" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.653s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.808295] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891280, 'name': Rename_Task, 'duration_secs': 0.426635} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.808590] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 851.808839] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad147d18-8a33-4f38-a826-8482d3bdc83d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.816938] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Waiting for the task: (returnval){ [ 851.816938] env[68285]: value = "task-2891281" [ 851.816938] env[68285]: _type = "Task" [ 851.816938] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.826943] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891281, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.957439] env[68285]: DEBUG nova.compute.manager [req-38a71a07-de00-43e2-a154-3f3f3a3780c1 req-439d673b-bc09-4f44-afa0-5bb6ff69c199 service nova] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Received event network-vif-plugged-25885f8e-e0d5-491d-a099-409ae53d20c1 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 851.957676] env[68285]: DEBUG oslo_concurrency.lockutils [req-38a71a07-de00-43e2-a154-3f3f3a3780c1 req-439d673b-bc09-4f44-afa0-5bb6ff69c199 service nova] Acquiring lock "1b9dd0e2-781f-43d7-a66e-e718a0972c78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.957911] env[68285]: DEBUG oslo_concurrency.lockutils [req-38a71a07-de00-43e2-a154-3f3f3a3780c1 req-439d673b-bc09-4f44-afa0-5bb6ff69c199 service nova] Lock "1b9dd0e2-781f-43d7-a66e-e718a0972c78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.958252] env[68285]: DEBUG oslo_concurrency.lockutils [req-38a71a07-de00-43e2-a154-3f3f3a3780c1 req-439d673b-bc09-4f44-afa0-5bb6ff69c199 service nova] Lock "1b9dd0e2-781f-43d7-a66e-e718a0972c78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.958530] env[68285]: DEBUG nova.compute.manager [req-38a71a07-de00-43e2-a154-3f3f3a3780c1 req-439d673b-bc09-4f44-afa0-5bb6ff69c199 service nova] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] No waiting events found dispatching network-vif-plugged-25885f8e-e0d5-491d-a099-409ae53d20c1 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 851.958805] env[68285]: WARNING 
nova.compute.manager [req-38a71a07-de00-43e2-a154-3f3f3a3780c1 req-439d673b-bc09-4f44-afa0-5bb6ff69c199 service nova] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Received unexpected event network-vif-plugged-25885f8e-e0d5-491d-a099-409ae53d20c1 for instance with vm_state building and task_state spawning. [ 851.958985] env[68285]: DEBUG nova.compute.manager [req-38a71a07-de00-43e2-a154-3f3f3a3780c1 req-439d673b-bc09-4f44-afa0-5bb6ff69c199 service nova] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Received event network-changed-25885f8e-e0d5-491d-a099-409ae53d20c1 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 851.959180] env[68285]: DEBUG nova.compute.manager [req-38a71a07-de00-43e2-a154-3f3f3a3780c1 req-439d673b-bc09-4f44-afa0-5bb6ff69c199 service nova] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Refreshing instance network info cache due to event network-changed-25885f8e-e0d5-491d-a099-409ae53d20c1. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 851.959365] env[68285]: DEBUG oslo_concurrency.lockutils [req-38a71a07-de00-43e2-a154-3f3f3a3780c1 req-439d673b-bc09-4f44-afa0-5bb6ff69c199 service nova] Acquiring lock "refresh_cache-1b9dd0e2-781f-43d7-a66e-e718a0972c78" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.981022] env[68285]: DEBUG nova.network.neutron [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Updating instance_info_cache with network_info: [{"id": "25885f8e-e0d5-491d-a099-409ae53d20c1", "address": "fa:16:3e:08:6d:2b", "network": {"id": "d7225652-1f39-4108-a3ec-2fe16c2f3612", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-725430316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d03ce152e74cec8910b12d34ad8ba6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25885f8e-e0", "ovs_interfaceid": "25885f8e-e0d5-491d-a099-409ae53d20c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.223615] env[68285]: DEBUG oslo_vmware.api [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891279, 'name': PowerOnVM_Task, 'duration_secs': 0.568328} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.223870] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 852.224082] env[68285]: INFO nova.compute.manager [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Took 18.26 seconds to spawn the instance on the hypervisor. [ 852.224322] env[68285]: DEBUG nova.compute.manager [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 852.225344] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7eee8c8-c225-44dd-96ab-fe8c0eb9e211 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.328804] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891281, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.484222] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "refresh_cache-1b9dd0e2-781f-43d7-a66e-e718a0972c78" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 852.484569] env[68285]: DEBUG nova.compute.manager [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Instance network_info: |[{"id": "25885f8e-e0d5-491d-a099-409ae53d20c1", "address": "fa:16:3e:08:6d:2b", "network": {"id": "d7225652-1f39-4108-a3ec-2fe16c2f3612", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-725430316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d03ce152e74cec8910b12d34ad8ba6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25885f8e-e0", "ovs_interfaceid": "25885f8e-e0d5-491d-a099-409ae53d20c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 852.487405] env[68285]: DEBUG oslo_concurrency.lockutils [req-38a71a07-de00-43e2-a154-3f3f3a3780c1 req-439d673b-bc09-4f44-afa0-5bb6ff69c199 service nova] Acquired lock "refresh_cache-1b9dd0e2-781f-43d7-a66e-e718a0972c78" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.487594] env[68285]: DEBUG nova.network.neutron [req-38a71a07-de00-43e2-a154-3f3f3a3780c1 req-439d673b-bc09-4f44-afa0-5bb6ff69c199 service nova] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Refreshing network info cache for port 25885f8e-e0d5-491d-a099-409ae53d20c1 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 852.488779] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:6d:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '25885f8e-e0d5-491d-a099-409ae53d20c1', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 852.505016] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 852.506698] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 852.506698] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca49b5ec-594a-44c8-b3cc-2971bbc2e3e9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.532100] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 852.532100] env[68285]: value = "task-2891282" [ 852.532100] env[68285]: _type = "Task" [ 852.532100] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.544844] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891282, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.753313] env[68285]: INFO nova.compute.manager [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Took 42.80 seconds to build instance. 
[ 852.817403] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e72f7e0-2484-4aaf-a17d-a6aba0c1e431 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.832729] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54943665-a7ee-416d-af58-bfa26ababb44 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.836458] env[68285]: DEBUG oslo_vmware.api [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891281, 'name': PowerOnVM_Task, 'duration_secs': 1.012675} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.836740] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 852.836907] env[68285]: INFO nova.compute.manager [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Took 8.32 seconds to spawn the instance on the hypervisor. [ 852.837138] env[68285]: DEBUG nova.compute.manager [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 852.838286] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d426a3ed-f321-4f84-bd11-b84a14ab24ad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.866545] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49f9157-4d94-458a-b5da-eea9521449e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.879232] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f8e3b8-ae7e-4dab-8a50-ece85903bbc3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.898501] env[68285]: DEBUG nova.compute.provider_tree [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 853.048371] env[68285]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-2891282, 'name': CreateVM_Task, 'duration_secs': 0.403458} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.048371] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 853.048371] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.048371] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.048371] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 853.048371] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc3f7229-8d06-4d26-abb7-7c52eff1943f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.053282] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 853.053282] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52acaa00-c083-e106-5801-809fb69108d1" [ 853.053282] env[68285]: _type = "Task" [ 853.053282] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.064015] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52acaa00-c083-e106-5801-809fb69108d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.258131] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2ee12ec6-7c1e-4555-a709-d0c19adaea8c tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "1c42043d-f8db-4cb9-8147-48d0d32c982b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.253s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.331690] env[68285]: DEBUG nova.network.neutron [req-38a71a07-de00-43e2-a154-3f3f3a3780c1 req-439d673b-bc09-4f44-afa0-5bb6ff69c199 service nova] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Updated VIF entry in instance network info cache for port 25885f8e-e0d5-491d-a099-409ae53d20c1. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 853.332047] env[68285]: DEBUG nova.network.neutron [req-38a71a07-de00-43e2-a154-3f3f3a3780c1 req-439d673b-bc09-4f44-afa0-5bb6ff69c199 service nova] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Updating instance_info_cache with network_info: [{"id": "25885f8e-e0d5-491d-a099-409ae53d20c1", "address": "fa:16:3e:08:6d:2b", "network": {"id": "d7225652-1f39-4108-a3ec-2fe16c2f3612", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-725430316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d03ce152e74cec8910b12d34ad8ba6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25885f8e-e0", "ovs_interfaceid": "25885f8e-e0d5-491d-a099-409ae53d20c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.385233] env[68285]: INFO nova.compute.manager [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Took 32.02 seconds to build instance. [ 853.421443] env[68285]: ERROR nova.scheduler.client.report [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [req-0b0ce4f7-183b-442b-9f9f-f2d40c233d5b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0b0ce4f7-183b-442b-9f9f-f2d40c233d5b"}]} [ 853.437903] env[68285]: DEBUG nova.scheduler.client.report [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 853.454642] env[68285]: DEBUG nova.scheduler.client.report [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 853.454642] env[68285]: DEBUG nova.compute.provider_tree [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 853.465543] env[68285]: DEBUG nova.scheduler.client.report [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 853.482274] env[68285]: DEBUG nova.scheduler.client.report [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 853.565394] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52acaa00-c083-e106-5801-809fb69108d1, 'name': SearchDatastore_Task, 'duration_secs': 0.011788} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.566026] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.566026] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 853.566953] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.566953] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.566953] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 853.566953] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6790b057-fb25-4fdc-a549-36edce6de6e5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.580169] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 853.580392] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 853.581150] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-591192f7-6663-4e27-936e-2c6d688c9422 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.591213] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 853.591213] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b0421c-214b-6f3f-8486-e47baccf39e7" [ 853.591213] env[68285]: _type = "Task" [ 853.591213] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.601696] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b0421c-214b-6f3f-8486-e47baccf39e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.705635] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "1c42043d-f8db-4cb9-8147-48d0d32c982b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.706613] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "1c42043d-f8db-4cb9-8147-48d0d32c982b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.706613] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "1c42043d-f8db-4cb9-8147-48d0d32c982b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.706613] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "1c42043d-f8db-4cb9-8147-48d0d32c982b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.706770] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "1c42043d-f8db-4cb9-8147-48d0d32c982b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.711448] env[68285]: INFO nova.compute.manager [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Terminating instance [ 853.760472] env[68285]: DEBUG nova.compute.manager [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 853.838720] env[68285]: DEBUG oslo_concurrency.lockutils [req-38a71a07-de00-43e2-a154-3f3f3a3780c1 req-439d673b-bc09-4f44-afa0-5bb6ff69c199 service nova] Releasing lock "refresh_cache-1b9dd0e2-781f-43d7-a66e-e718a0972c78" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.887219] env[68285]: DEBUG oslo_concurrency.lockutils [None req-21c3d61a-38e6-4a59-a492-55818fec6e99 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Lock "a2a7590d-c415-4955-8a25-4b1411449557" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.665s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.038708] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfc743f-16c0-4404-9245-0380f07f0003 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.047111] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9ec7b2-a7d2-4282-af17-56b22e0b25ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.083189] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4030b8c9-e5cf-4ead-8cc8-8b4d740d72f1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.092733] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673baa78-753d-4bde-8e6b-e9042fd73927 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.110020] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b0421c-214b-6f3f-8486-e47baccf39e7, 'name': SearchDatastore_Task, 'duration_secs': 0.014707} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.117420] env[68285]: DEBUG nova.compute.provider_tree [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 854.119011] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef504f79-8a32-464d-9022-7625d500c0a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.128522] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 854.128522] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f83091-251f-cd20-5bcf-6ad6450fd486" [ 854.128522] env[68285]: _type = "Task" [ 854.128522] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.139170] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f83091-251f-cd20-5bcf-6ad6450fd486, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.218453] env[68285]: DEBUG nova.compute.manager [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 854.218710] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 854.219675] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d5a948-3969-488b-abff-62c2a8671791 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.230836] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 854.230836] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0d48544-803f-4b7c-a5ad-bfc6a5f5666a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.238728] env[68285]: DEBUG oslo_vmware.api [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 854.238728] env[68285]: value = "task-2891283" [ 854.238728] env[68285]: _type = "Task" [ 854.238728] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.247819] env[68285]: DEBUG oslo_vmware.api [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891283, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.287354] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.327162] env[68285]: DEBUG oslo_concurrency.lockutils [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "ee45231a-80f2-49b9-8bc7-03a0c920a668" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.327507] env[68285]: DEBUG oslo_concurrency.lockutils [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "ee45231a-80f2-49b9-8bc7-03a0c920a668" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.327849] env[68285]: DEBUG oslo_concurrency.lockutils [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "ee45231a-80f2-49b9-8bc7-03a0c920a668-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.328561] env[68285]: DEBUG oslo_concurrency.lockutils [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "ee45231a-80f2-49b9-8bc7-03a0c920a668-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.328561] env[68285]: DEBUG oslo_concurrency.lockutils [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "ee45231a-80f2-49b9-8bc7-03a0c920a668-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.330877] env[68285]: INFO nova.compute.manager [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Terminating instance [ 854.391021] env[68285]: DEBUG nova.compute.manager [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 854.490021] env[68285]: DEBUG nova.compute.manager [req-19507762-7501-46f7-88b6-3dd681f9d753 req-27093ef9-fd31-4167-9084-39d16187b52d service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Received event network-changed-90718536-f982-4e0c-8bc8-5ce84e9f0a55 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 854.490232] env[68285]: DEBUG nova.compute.manager [req-19507762-7501-46f7-88b6-3dd681f9d753 req-27093ef9-fd31-4167-9084-39d16187b52d service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Refreshing instance network info cache due to event network-changed-90718536-f982-4e0c-8bc8-5ce84e9f0a55. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 854.490451] env[68285]: DEBUG oslo_concurrency.lockutils [req-19507762-7501-46f7-88b6-3dd681f9d753 req-27093ef9-fd31-4167-9084-39d16187b52d service nova] Acquiring lock "refresh_cache-a2a7590d-c415-4955-8a25-4b1411449557" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.490594] env[68285]: DEBUG oslo_concurrency.lockutils [req-19507762-7501-46f7-88b6-3dd681f9d753 req-27093ef9-fd31-4167-9084-39d16187b52d service nova] Acquired lock "refresh_cache-a2a7590d-c415-4955-8a25-4b1411449557" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.490751] env[68285]: DEBUG nova.network.neutron [req-19507762-7501-46f7-88b6-3dd681f9d753 req-27093ef9-fd31-4167-9084-39d16187b52d service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Refreshing network info cache for port 90718536-f982-4e0c-8bc8-5ce84e9f0a55 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 854.627839] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "0d13cc84-bbf2-4e8b-8344-d69acac6bd35" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.628170] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "0d13cc84-bbf2-4e8b-8344-d69acac6bd35" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.628436] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "0d13cc84-bbf2-4e8b-8344-d69acac6bd35-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.628661] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "0d13cc84-bbf2-4e8b-8344-d69acac6bd35-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.628871] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "0d13cc84-bbf2-4e8b-8344-d69acac6bd35-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.635783] env[68285]: INFO nova.compute.manager [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Terminating instance [ 854.647626] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f83091-251f-cd20-5bcf-6ad6450fd486, 'name': SearchDatastore_Task, 'duration_secs': 0.011242} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.647898] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.648187] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 1b9dd0e2-781f-43d7-a66e-e718a0972c78/1b9dd0e2-781f-43d7-a66e-e718a0972c78.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 854.648495] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb9c664c-a2c3-4708-85b0-a0042dbd83b8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.659476] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 854.659476] env[68285]: value = "task-2891284" [ 854.659476] env[68285]: _type = "Task" [ 854.659476] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.665352] env[68285]: DEBUG nova.scheduler.client.report [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 57 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 854.665352] env[68285]: DEBUG nova.compute.provider_tree [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 57 to 58 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 854.665352] env[68285]: DEBUG nova.compute.provider_tree [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 854.677320] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891284, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.748843] env[68285]: DEBUG oslo_vmware.api [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891283, 'name': PowerOffVM_Task, 'duration_secs': 0.303884} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.749402] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 854.749607] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 854.749865] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-160ac45a-ff1b-46c5-9a1a-c1678be67c48 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.835831] env[68285]: DEBUG nova.compute.manager [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 854.836303] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 854.837796] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b930569-f9da-4154-b710-46fee57f3231 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.847403] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 854.847699] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65d2f1fb-f527-4949-aa20-94bfa60ebf3f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.855603] env[68285]: DEBUG oslo_vmware.api [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 854.855603] env[68285]: value = "task-2891286" [ 854.855603] env[68285]: _type = "Task" [ 854.855603] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.865405] env[68285]: DEBUG oslo_vmware.api [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891286, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.909535] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 854.909872] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 854.910165] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Deleting the datastore file [datastore1] 1c42043d-f8db-4cb9-8147-48d0d32c982b {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 854.910489] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a369de33-ed93-4cb1-b303-02df5d6c2ceb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.915400] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.918975] env[68285]: DEBUG oslo_vmware.api [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 854.918975] env[68285]: value = "task-2891287" [ 854.918975] env[68285]: _type = "Task" [ 854.918975] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.930451] env[68285]: DEBUG oslo_vmware.api [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891287, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.141408] env[68285]: DEBUG nova.compute.manager [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 855.141669] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 855.142622] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb867ce5-b423-483d-928c-9595cd2b9981 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.152480] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 855.152781] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cfe2196e-91a1-4ad7-8431-c17003081101 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.173802] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.960s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.174436] env[68285]: DEBUG nova.compute.manager [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 855.177534] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891284, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.184356] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 25.493s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.184580] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.004s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.184868] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68285) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 855.185108] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.472s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.186714] env[68285]: INFO nova.compute.claims [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 855.194115] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b531636f-7be5-42d2-b0fd-56119b0ffec5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.199102] env[68285]: DEBUG oslo_vmware.api [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 855.199102] env[68285]: value = "task-2891288" [ 855.199102] env[68285]: _type = "Task" [ 855.199102] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.210771] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32f28a4-8b1d-4c5b-9c43-667bab66f0ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.215722] env[68285]: DEBUG oslo_vmware.api [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891288, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.227893] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab73144-928d-4e20-bb31-04c3f6a27fef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.237181] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f071c88a-6916-406b-af97-9737927daa47 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.270976] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178537MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=68285) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 855.271219] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.368491] env[68285]: DEBUG oslo_vmware.api [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891286, 'name': PowerOffVM_Task, 'duration_secs': 0.327704} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.368868] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.369104] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 855.369357] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3fb2c458-ef55-49ec-8df8-d642e1e22015 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.407047] env[68285]: DEBUG nova.network.neutron [req-19507762-7501-46f7-88b6-3dd681f9d753 req-27093ef9-fd31-4167-9084-39d16187b52d service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Updated VIF entry in instance network info cache for port 90718536-f982-4e0c-8bc8-5ce84e9f0a55. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 855.407518] env[68285]: DEBUG nova.network.neutron [req-19507762-7501-46f7-88b6-3dd681f9d753 req-27093ef9-fd31-4167-9084-39d16187b52d service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Updating instance_info_cache with network_info: [{"id": "90718536-f982-4e0c-8bc8-5ce84e9f0a55", "address": "fa:16:3e:51:21:55", "network": {"id": "5f8bb7cf-027c-4e6d-aa8c-a3dae87f45f6", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1414159614-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bde4f3e3864d4a4c9b0df9edcaf258e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90718536-f9", "ovs_interfaceid": "90718536-f982-4e0c-8bc8-5ce84e9f0a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.430996] env[68285]: DEBUG oslo_vmware.api [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891287, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.364575} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.431372] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.431641] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 855.432060] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 855.432317] env[68285]: INFO nova.compute.manager [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 855.432613] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 855.432890] env[68285]: DEBUG nova.compute.manager [-] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 855.433017] env[68285]: DEBUG nova.network.neutron [-] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 855.468863] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 855.469132] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 855.469341] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Deleting the datastore file [datastore2] ee45231a-80f2-49b9-8bc7-03a0c920a668 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.469644] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c54fb02-2319-414f-959d-1e7c38edae0d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.478192] env[68285]: DEBUG oslo_vmware.api [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 855.478192] env[68285]: value = "task-2891290" [ 855.478192] env[68285]: _type = "Task" [ 855.478192] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.486622] env[68285]: DEBUG oslo_vmware.api [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891290, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.670897] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891284, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54046} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.671283] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 1b9dd0e2-781f-43d7-a66e-e718a0972c78/1b9dd0e2-781f-43d7-a66e-e718a0972c78.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 855.671544] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 855.671830] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c752a837-1c83-404d-a162-c7d605fe9df7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.679716] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 855.679716] env[68285]: value = "task-2891291" [ 855.679716] env[68285]: _type = "Task" [ 855.679716] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.690113] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891291, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.694756] env[68285]: DEBUG nova.compute.utils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 855.697090] env[68285]: DEBUG nova.compute.manager [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 855.697090] env[68285]: DEBUG nova.network.neutron [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 855.712830] env[68285]: DEBUG oslo_vmware.api [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891288, 'name': PowerOffVM_Task, 'duration_secs': 0.270552} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.713977] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.714228] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 855.714672] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35df9708-7a6b-4cab-8515-266b92c9241e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.790388] env[68285]: DEBUG nova.policy [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '08b855c4d4af42cba6a5dfeb2f9dcd11', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ca04723be164bd6bc8759280a25797d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 855.796750] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 855.797608] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 855.797867] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Deleting the datastore file [datastore1] 0d13cc84-bbf2-4e8b-8344-d69acac6bd35 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.798398] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9ab8a94-677b-4437-9086-78c1cf6f2789 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.809427] env[68285]: DEBUG oslo_vmware.api [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for the task: (returnval){ [ 855.809427] env[68285]: value = "task-2891293" [ 
855.809427] env[68285]: _type = "Task" [ 855.809427] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.819410] env[68285]: DEBUG oslo_vmware.api [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891293, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.911826] env[68285]: DEBUG oslo_concurrency.lockutils [req-19507762-7501-46f7-88b6-3dd681f9d753 req-27093ef9-fd31-4167-9084-39d16187b52d service nova] Releasing lock "refresh_cache-a2a7590d-c415-4955-8a25-4b1411449557" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.924893] env[68285]: DEBUG nova.compute.manager [req-3b664120-2337-48bf-9bfb-ced51ce64762 req-70888b9d-b793-41c4-870a-02341acefd2d service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Received event network-vif-deleted-bb64687c-e2d8-4813-9c8a-01b6904040e3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 855.925310] env[68285]: INFO nova.compute.manager [req-3b664120-2337-48bf-9bfb-ced51ce64762 req-70888b9d-b793-41c4-870a-02341acefd2d service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Neutron deleted interface bb64687c-e2d8-4813-9c8a-01b6904040e3; detaching it from the instance and deleting it from the info cache [ 855.925712] env[68285]: DEBUG nova.network.neutron [req-3b664120-2337-48bf-9bfb-ced51ce64762 req-70888b9d-b793-41c4-870a-02341acefd2d service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Updating instance_info_cache with network_info: [{"id": "57f2793d-ad69-4e92-9f57-d7c6255ff40d", "address": "fa:16:3e:e5:fb:4f", "network": {"id": "2148d25f-40d0-4388-b5d4-7baa98dbc1e8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1346940619", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57f2793d-ad", "ovs_interfaceid": "57f2793d-ad69-4e92-9f57-d7c6255ff40d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1b81cd45-5a3e-4884-af46-ea57107a812b", "address": "fa:16:3e:1c:bc:2f", "network": {"id": "f222a190-c77e-4c09-8469-38f8774db1ae", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1029505673", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.248", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b81cd45-5a", "ovs_interfaceid": "1b81cd45-5a3e-4884-af46-ea57107a812b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.992270] env[68285]: DEBUG oslo_vmware.api [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174128} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.992321] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.992666] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 855.993030] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 855.993651] env[68285]: INFO nova.compute.manager [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Took 1.16 seconds to destroy the instance on the hypervisor. [ 855.994175] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 855.994893] env[68285]: DEBUG nova.compute.manager [-] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 855.995086] env[68285]: DEBUG nova.network.neutron [-] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 856.194487] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891291, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075434} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.194895] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 856.195886] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7c75b6-7418-49bc-aefb-0bf514a93fda {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.200246] env[68285]: DEBUG nova.compute.manager [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 856.223836] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 1b9dd0e2-781f-43d7-a66e-e718a0972c78/1b9dd0e2-781f-43d7-a66e-e718a0972c78.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 856.224661] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8dccbf27-83e0-402e-9a39-c7f502e71bd1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.252756] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 856.252756] env[68285]: value = "task-2891294" [ 856.252756] env[68285]: _type = "Task" [ 856.252756] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.264587] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891294, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.320060] env[68285]: DEBUG oslo_vmware.api [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Task: {'id': task-2891293, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151046} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.322917] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 856.323239] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 856.323355] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 856.323642] env[68285]: INFO nova.compute.manager [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Took 1.18 seconds to destroy the instance on the hypervisor. [ 856.323789] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 856.324431] env[68285]: DEBUG nova.compute.manager [-] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 856.324532] env[68285]: DEBUG nova.network.neutron [-] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 856.411851] env[68285]: DEBUG nova.network.neutron [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Successfully created port: c3761ed0-eacf-4744-a549-4868f00f2bb5 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 856.430576] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bcc7d93e-7521-400f-9ddb-a1ca6e67c227 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.444352] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9795d0-8826-4e4c-9479-3c2c5afca52b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.488890] env[68285]: DEBUG nova.compute.manager [req-3b664120-2337-48bf-9bfb-ced51ce64762 req-70888b9d-b793-41c4-870a-02341acefd2d service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Detach interface failed, port_id=bb64687c-e2d8-4813-9c8a-01b6904040e3, reason: Instance 1c42043d-f8db-4cb9-8147-48d0d32c982b could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 856.519873] env[68285]: DEBUG nova.compute.manager [req-d0e9d447-ff83-4320-bd60-0722aad5c62d req-103ec313-b70f-45ce-85d3-aab19b18c7b9 service nova] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Received event network-vif-deleted-47d51556-cb83-406c-ad00-883c1493aa5f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 856.520064] env[68285]: INFO nova.compute.manager [req-d0e9d447-ff83-4320-bd60-0722aad5c62d req-103ec313-b70f-45ce-85d3-aab19b18c7b9 service nova] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Neutron deleted interface 47d51556-cb83-406c-ad00-883c1493aa5f; detaching it from the instance and deleting it from the info cache [ 856.520407] env[68285]: DEBUG nova.network.neutron [req-d0e9d447-ff83-4320-bd60-0722aad5c62d req-103ec313-b70f-45ce-85d3-aab19b18c7b9 service nova] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.757603] env[68285]: DEBUG nova.network.neutron [-] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.767473] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891294, 'name': ReconfigVM_Task, 'duration_secs': 0.316114} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.767934] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 1b9dd0e2-781f-43d7-a66e-e718a0972c78/1b9dd0e2-781f-43d7-a66e-e718a0972c78.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 856.769321] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4963bdca-4f35-4a31-8621-59c9ba99ae73 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.778379] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 856.778379] env[68285]: value = "task-2891295" [ 856.778379] env[68285]: _type = "Task" [ 856.778379] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.790445] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891295, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.808819] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091b50ca-f826-4c2b-971c-ef3ed429bef8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.818187] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acad7369-53ec-4f51-b7ed-c3c1d7e43cba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.850021] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac1b37d-122a-4b9c-a7eb-81794245ae9b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.858504] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cda542a-7936-4298-9364-8fc8760a80ef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.872880] env[68285]: DEBUG nova.compute.provider_tree [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.023071] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3a03310-b9ea-46e6-adc3-f3b36ab8f256 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.034329] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ddefc9d4-25a9-48e6-955b-c2bee9eef42a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.071753] env[68285]: DEBUG nova.network.neutron [-] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.073041] env[68285]: DEBUG nova.compute.manager [req-d0e9d447-ff83-4320-bd60-0722aad5c62d req-103ec313-b70f-45ce-85d3-aab19b18c7b9 service nova] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Detach interface failed, port_id=47d51556-cb83-406c-ad00-883c1493aa5f, reason: Instance ee45231a-80f2-49b9-8bc7-03a0c920a668 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 857.228455] env[68285]: DEBUG nova.network.neutron [-] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.233790] env[68285]: DEBUG nova.compute.manager [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 857.259146] env[68285]: DEBUG nova.virt.hardware [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 857.259399] env[68285]: DEBUG nova.virt.hardware [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 857.259555] env[68285]: DEBUG nova.virt.hardware [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 857.259734] env[68285]: DEBUG nova.virt.hardware [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 857.259877] env[68285]: DEBUG 
nova.virt.hardware [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 857.262059] env[68285]: DEBUG nova.virt.hardware [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 857.262348] env[68285]: DEBUG nova.virt.hardware [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 857.262524] env[68285]: DEBUG nova.virt.hardware [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 857.262696] env[68285]: DEBUG nova.virt.hardware [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 857.262860] env[68285]: DEBUG nova.virt.hardware [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 857.263043] env[68285]: DEBUG nova.virt.hardware [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 857.263765] env[68285]: INFO nova.compute.manager [-] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Took 1.27 seconds to deallocate network for instance. [ 857.264562] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8fc5aab-7256-4f55-bdfe-ba23a4775b55 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.280020] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5288b9f9-f5a9-4504-a52e-74ccebff36b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.289654] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891295, 'name': Rename_Task, 'duration_secs': 0.166223} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.297403] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 857.297855] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-763f3871-fc1c-4fbf-a16b-d1b410013d05 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.305567] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 857.305567] env[68285]: value = "task-2891296" [ 857.305567] env[68285]: _type = "Task" [ 857.305567] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.314558] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891296, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.377053] env[68285]: DEBUG nova.scheduler.client.report [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 857.575253] env[68285]: INFO nova.compute.manager [-] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Took 1.25 seconds to deallocate network for instance. [ 857.734205] env[68285]: INFO nova.compute.manager [-] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Took 2.30 seconds to deallocate network for instance. [ 857.774350] env[68285]: DEBUG oslo_concurrency.lockutils [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.816760] env[68285]: DEBUG oslo_vmware.api [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891296, 'name': PowerOnVM_Task, 'duration_secs': 0.486849} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.817041] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 857.817319] env[68285]: INFO nova.compute.manager [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Took 8.31 seconds to spawn the instance on the hypervisor. [ 857.817508] env[68285]: DEBUG nova.compute.manager [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 857.818360] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63f9c90-82cd-40cb-be98-21d5ec1862ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.882391] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.697s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.882622] env[68285]: DEBUG nova.compute.manager [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 857.885368] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.679s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.885614] env[68285]: DEBUG nova.objects.instance [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lazy-loading 'resources' on Instance uuid f0145d64-60e4-4ad5-a6ea-6c5d40780df5 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 857.963425] env[68285]: DEBUG nova.compute.manager [req-c7d4ddd7-713b-45b6-b794-d2b0412aedb1 req-5e972954-fab6-4d2d-a9ad-1f347e25c64b service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Received event network-vif-deleted-57f2793d-ad69-4e92-9f57-d7c6255ff40d {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 857.963689] env[68285]: DEBUG nova.compute.manager [req-c7d4ddd7-713b-45b6-b794-d2b0412aedb1 req-5e972954-fab6-4d2d-a9ad-1f347e25c64b service nova] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Received event network-vif-deleted-1b81cd45-5a3e-4884-af46-ea57107a812b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 858.086017] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.205015] env[68285]: DEBUG nova.network.neutron [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Successfully updated port: c3761ed0-eacf-4744-a549-4868f00f2bb5 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 858.242029] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.334560] env[68285]: INFO nova.compute.manager [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Took 34.99 seconds to build instance. 
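The completion entries above ("Task: {'id': task-..., 'name': ..., 'duration_secs': ...} completed successfully") follow a fixed format emitted by oslo_vmware.api._poll_task, which makes it straightforward to pull per-task timings out of a capture like this one. A minimal parsing sketch, assuming the log has been saved to a file; the file name and regex are illustrative and not part of Nova or oslo.vmware:

import re
from collections import defaultdict

# Matches completion entries such as:
#   Task: {'id': task-2891295, 'name': Rename_Task, 'duration_secs': 0.166223} completed successfully.
TASK_RE = re.compile(
    r"Task: \{'id': (?P<id>[^,]+), 'name': (?P<name>\w+), "
    r"'duration_secs': (?P<secs>[\d.]+)\} completed successfully"
)

def summarize(path="nova-compute.log"):  # hypothetical file name
    durations = defaultdict(list)
    with open(path) as fh:
        for line in fh:
            match = TASK_RE.search(line)
            if match:
                durations[match.group("name")].append(float(match.group("secs")))
    for name, secs in sorted(durations.items()):
        print(f"{name}: {len(secs)} task(s), avg {sum(secs) / len(secs):.3f}s")

Run against this section it would report, for example, one Rename_Task at roughly 0.166s and one PowerOnVM_Task at roughly 0.487s.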
[ 858.389175] env[68285]: DEBUG nova.compute.utils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 858.393176] env[68285]: DEBUG nova.compute.manager [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 858.393347] env[68285]: DEBUG nova.network.neutron [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 858.431253] env[68285]: DEBUG nova.policy [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '08b855c4d4af42cba6a5dfeb2f9dcd11', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ca04723be164bd6bc8759280a25797d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 858.546023] env[68285]: DEBUG nova.compute.manager [req-d01c6a3e-2e46-4df2-ae0a-392518d2d390 req-9ad97a2c-d572-4252-a2c5-f978c8821e1b service nova] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Received event network-vif-deleted-64daa1ff-7232-4d18-9bb9-18f48ce5df20 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 858.706260] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "refresh_cache-b0f32ce2-92fd-4290-a2f4-e5658f775f4f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.706402] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquired lock "refresh_cache-b0f32ce2-92fd-4290-a2f4-e5658f775f4f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.706552] env[68285]: DEBUG nova.network.neutron [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 858.714921] env[68285]: DEBUG nova.network.neutron [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 
437a18da-8fe4-478e-82a0-3b1a9da47df8] Successfully created port: 1f09289d-57ed-49a6-a446-81bdfee585f3 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 858.837206] env[68285]: DEBUG oslo_concurrency.lockutils [None req-14f81937-4598-4fa1-b79c-f26767491ee7 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "1b9dd0e2-781f-43d7-a66e-e718a0972c78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.619s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.894367] env[68285]: DEBUG nova.compute.manager [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 858.975834] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6665b4-827f-42e4-8e9a-1b5d608e4b62 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.984496] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c678d61-b389-495d-ac05-569710fa8a12 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.016392] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b7301e-4fa8-430d-9257-02dc26c9645b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.024341] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1634f2-aa32-4528-a4ca-6862e6f10667 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.039622] env[68285]: DEBUG nova.compute.provider_tree [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.264539] env[68285]: DEBUG nova.network.neutron [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 859.343183] env[68285]: DEBUG nova.compute.manager [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 859.467913] env[68285]: DEBUG nova.network.neutron [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Updating instance_info_cache with network_info: [{"id": "c3761ed0-eacf-4744-a549-4868f00f2bb5", "address": "fa:16:3e:d4:7c:0f", "network": {"id": "12f11a62-40d0-4668-a558-86bd6b08e0a8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2047022046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca04723be164bd6bc8759280a25797d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3761ed0-ea", "ovs_interfaceid": "c3761ed0-eacf-4744-a549-4868f00f2bb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.543263] env[68285]: DEBUG nova.scheduler.client.report [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 859.840729] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.840910] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.866313] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 
tempest-ServerMetadataTestJSON-231718286-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.908548] env[68285]: DEBUG nova.compute.manager [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 859.935056] env[68285]: DEBUG nova.virt.hardware [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 859.935449] env[68285]: DEBUG nova.virt.hardware [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 859.935720] env[68285]: DEBUG nova.virt.hardware [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 859.936054] env[68285]: DEBUG nova.virt.hardware [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 859.936304] env[68285]: DEBUG nova.virt.hardware [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 859.936540] env[68285]: DEBUG nova.virt.hardware [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 859.936859] env[68285]: DEBUG nova.virt.hardware [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 
tempest-ListServerFiltersTestJSON-1463485235-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 859.937266] env[68285]: DEBUG nova.virt.hardware [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 859.937645] env[68285]: DEBUG nova.virt.hardware [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 859.937851] env[68285]: DEBUG nova.virt.hardware [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 859.938047] env[68285]: DEBUG nova.virt.hardware [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 859.939168] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca357d0-00f9-4cfd-abe3-d9bf6a30d87b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.948232] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184c3b15-2401-4cbf-bb58-c261405dfb7d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.970656] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Releasing lock "refresh_cache-b0f32ce2-92fd-4290-a2f4-e5658f775f4f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 859.970962] env[68285]: DEBUG nova.compute.manager [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Instance network_info: |[{"id": "c3761ed0-eacf-4744-a549-4868f00f2bb5", "address": "fa:16:3e:d4:7c:0f", "network": {"id": "12f11a62-40d0-4668-a558-86bd6b08e0a8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2047022046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca04723be164bd6bc8759280a25797d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3761ed0-ea", "ovs_interfaceid": "c3761ed0-eacf-4744-a549-4868f00f2bb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 859.971348] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:7c:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7bcd9d2d-25c8-41ad-9a4a-93b9029ba993', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3761ed0-eacf-4744-a549-4868f00f2bb5', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 859.978652] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Creating folder: Project (1ca04723be164bd6bc8759280a25797d). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 859.979276] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89d1d834-42ff-49ac-9c78-ff7f4df4d2be {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.991881] env[68285]: DEBUG nova.compute.manager [req-467d188e-f345-4899-a87e-1bac7a8c5e2e req-2be27427-8ec2-4bb7-9d93-bd2347ecb6d2 service nova] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Received event network-vif-plugged-c3761ed0-eacf-4744-a549-4868f00f2bb5 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 859.992098] env[68285]: DEBUG oslo_concurrency.lockutils [req-467d188e-f345-4899-a87e-1bac7a8c5e2e req-2be27427-8ec2-4bb7-9d93-bd2347ecb6d2 service nova] Acquiring lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.992301] env[68285]: DEBUG oslo_concurrency.lockutils [req-467d188e-f345-4899-a87e-1bac7a8c5e2e req-2be27427-8ec2-4bb7-9d93-bd2347ecb6d2 service nova] Lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.992465] env[68285]: DEBUG oslo_concurrency.lockutils [req-467d188e-f345-4899-a87e-1bac7a8c5e2e req-2be27427-8ec2-4bb7-9d93-bd2347ecb6d2 service nova] Lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.992626] env[68285]: DEBUG nova.compute.manager [req-467d188e-f345-4899-a87e-1bac7a8c5e2e 
req-2be27427-8ec2-4bb7-9d93-bd2347ecb6d2 service nova] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] No waiting events found dispatching network-vif-plugged-c3761ed0-eacf-4744-a549-4868f00f2bb5 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 859.992784] env[68285]: WARNING nova.compute.manager [req-467d188e-f345-4899-a87e-1bac7a8c5e2e req-2be27427-8ec2-4bb7-9d93-bd2347ecb6d2 service nova] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Received unexpected event network-vif-plugged-c3761ed0-eacf-4744-a549-4868f00f2bb5 for instance with vm_state building and task_state spawning. [ 859.992940] env[68285]: DEBUG nova.compute.manager [req-467d188e-f345-4899-a87e-1bac7a8c5e2e req-2be27427-8ec2-4bb7-9d93-bd2347ecb6d2 service nova] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Received event network-changed-c3761ed0-eacf-4744-a549-4868f00f2bb5 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 859.993101] env[68285]: DEBUG nova.compute.manager [req-467d188e-f345-4899-a87e-1bac7a8c5e2e req-2be27427-8ec2-4bb7-9d93-bd2347ecb6d2 service nova] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Refreshing instance network info cache due to event network-changed-c3761ed0-eacf-4744-a549-4868f00f2bb5. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 859.993276] env[68285]: DEBUG oslo_concurrency.lockutils [req-467d188e-f345-4899-a87e-1bac7a8c5e2e req-2be27427-8ec2-4bb7-9d93-bd2347ecb6d2 service nova] Acquiring lock "refresh_cache-b0f32ce2-92fd-4290-a2f4-e5658f775f4f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.993408] env[68285]: DEBUG oslo_concurrency.lockutils [req-467d188e-f345-4899-a87e-1bac7a8c5e2e req-2be27427-8ec2-4bb7-9d93-bd2347ecb6d2 service nova] Acquired lock "refresh_cache-b0f32ce2-92fd-4290-a2f4-e5658f775f4f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 859.993567] env[68285]: DEBUG nova.network.neutron [req-467d188e-f345-4899-a87e-1bac7a8c5e2e req-2be27427-8ec2-4bb7-9d93-bd2347ecb6d2 service nova] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Refreshing network info cache for port c3761ed0-eacf-4744-a549-4868f00f2bb5 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 859.996702] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Created folder: Project (1ca04723be164bd6bc8759280a25797d) in parent group-v580775. [ 859.996702] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Creating folder: Instances. Parent ref: group-v580862. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 859.997036] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39597f5a-cdb3-40a8-b899-296b966332e9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.008731] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Created folder: Instances in parent group-v580862. 
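The Folder.CreateFolder and Folder.CreateVM_Task invocations above, followed by the "Waiting for the task", "progress is N%", and "completed successfully" entries, are the standard oslo.vmware request-and-poll cycle: invoke_api() issues the SOAP call (the "Invoking ..." line from oslo_vmware.service) and wait_for_task() polls the task until it finishes (the _poll_task lines from oslo_vmware.api). A rough sketch of that cycle outside of Nova, with placeholder connection details and object references rather than values from this deployment:

from oslo_vmware import api

# Placeholder credentials; in Nova these come from the [vmware] section of nova.conf.
session = api.VMwareAPISession(
    host="vcenter.example.test",
    server_username="user",
    server_password="secret",
    api_retry_count=10,
    task_poll_interval=0.5,
)

def create_vm(vm_folder, resource_pool, config_spec):
    # Logs "Invoking Folder.CreateVM_Task with opID=..." via oslo_vmware.service.
    task = session.invoke_api(session.vim, "CreateVM_Task", vm_folder,
                              config=config_spec, pool=resource_pool)
    # Logs "Task: {'id': ..., 'name': CreateVM_Task} progress is N%." until done,
    # then returns the task info whose result is the new VM reference.
    task_info = session.wait_for_task(task)
    return task_info.result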
[ 860.008911] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 860.009107] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 860.009309] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab5ba53c-9d31-4aa8-a1ad-ec0f526eeb4c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.029635] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 860.029635] env[68285]: value = "task-2891299" [ 860.029635] env[68285]: _type = "Task" [ 860.029635] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.037786] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891299, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.049246] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.164s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.051695] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.316s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.053161] env[68285]: INFO nova.compute.claims [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 860.072379] env[68285]: INFO nova.scheduler.client.report [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Deleted allocations for instance f0145d64-60e4-4ad5-a6ea-6c5d40780df5 [ 860.540566] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891299, 'name': CreateVM_Task, 'duration_secs': 0.438913} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.541045] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 860.541452] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.541616] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.541938] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 860.542217] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52163414-f1d1-4193-8eb6-489d933a7764 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.548408] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 860.548408] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52179bcc-d19c-cf15-11dd-e903cc4d9bab" [ 860.548408] env[68285]: _type = "Task" [ 860.548408] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.562820] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52179bcc-d19c-cf15-11dd-e903cc4d9bab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.579557] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72ec91b5-720c-4e38-b4fb-d01269cd050c tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "f0145d64-60e4-4ad5-a6ea-6c5d40780df5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.596s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.847938] env[68285]: DEBUG nova.network.neutron [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Successfully updated port: 1f09289d-57ed-49a6-a446-81bdfee585f3 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 860.910478] env[68285]: DEBUG nova.network.neutron [req-467d188e-f345-4899-a87e-1bac7a8c5e2e req-2be27427-8ec2-4bb7-9d93-bd2347ecb6d2 service nova] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Updated VIF entry in instance network info cache for port c3761ed0-eacf-4744-a549-4868f00f2bb5. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 860.910817] env[68285]: DEBUG nova.network.neutron [req-467d188e-f345-4899-a87e-1bac7a8c5e2e req-2be27427-8ec2-4bb7-9d93-bd2347ecb6d2 service nova] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Updating instance_info_cache with network_info: [{"id": "c3761ed0-eacf-4744-a549-4868f00f2bb5", "address": "fa:16:3e:d4:7c:0f", "network": {"id": "12f11a62-40d0-4668-a558-86bd6b08e0a8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2047022046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca04723be164bd6bc8759280a25797d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3761ed0-ea", "ovs_interfaceid": "c3761ed0-eacf-4744-a549-4868f00f2bb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.060087] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52179bcc-d19c-cf15-11dd-e903cc4d9bab, 'name': SearchDatastore_Task, 'duration_secs': 0.014795} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.060606] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.060921] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 861.061085] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.061246] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.061426] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 861.061679] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b913827-6f47-49f6-8b84-d899d6e6e298 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.076622] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 861.076823] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 861.077701] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7c502b6-8881-4d48-b89a-d9c0fc68af23 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.084382] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 861.084382] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b42c4c-7c3f-87d5-f5e9-2afe77c88910" [ 861.084382] env[68285]: _type = "Task" [ 861.084382] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.094309] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b42c4c-7c3f-87d5-f5e9-2afe77c88910, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.301564] env[68285]: DEBUG oslo_concurrency.lockutils [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "fe9a8a13-73ec-4556-a62c-cc49fd01f539" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.301821] env[68285]: DEBUG oslo_concurrency.lockutils [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "fe9a8a13-73ec-4556-a62c-cc49fd01f539" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.302035] env[68285]: DEBUG oslo_concurrency.lockutils [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "fe9a8a13-73ec-4556-a62c-cc49fd01f539-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.302230] env[68285]: DEBUG oslo_concurrency.lockutils [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "fe9a8a13-73ec-4556-a62c-cc49fd01f539-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.302393] env[68285]: DEBUG oslo_concurrency.lockutils [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "fe9a8a13-73ec-4556-a62c-cc49fd01f539-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.304292] env[68285]: INFO nova.compute.manager [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Terminating instance [ 861.353800] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "refresh_cache-437a18da-8fe4-478e-82a0-3b1a9da47df8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.353935] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquired lock "refresh_cache-437a18da-8fe4-478e-82a0-3b1a9da47df8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.354114] env[68285]: DEBUG nova.network.neutron [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 861.413798] env[68285]: DEBUG oslo_concurrency.lockutils [req-467d188e-f345-4899-a87e-1bac7a8c5e2e req-2be27427-8ec2-4bb7-9d93-bd2347ecb6d2 service nova] Releasing lock "refresh_cache-b0f32ce2-92fd-4290-a2f4-e5658f775f4f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.474836] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ee80c7-0b50-4e73-89de-68fc2f83658a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.483055] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d5457a6-62dd-4d8c-8761-c5c29cb8560a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.514522] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8248ec80-fd2c-4170-b868-a1160e4a55b8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.522920] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9654dc-c866-43b0-8655-e7d8136f4559 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.537394] env[68285]: DEBUG nova.compute.provider_tree [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.595208] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 
tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b42c4c-7c3f-87d5-f5e9-2afe77c88910, 'name': SearchDatastore_Task, 'duration_secs': 0.057686} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.595984] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f340d0e-8965-4377-bdc4-6c5e0a827542 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.601292] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 861.601292] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]520525f2-896f-a99f-192a-cf264a9d6e08" [ 861.601292] env[68285]: _type = "Task" [ 861.601292] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.609555] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520525f2-896f-a99f-192a-cf264a9d6e08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.808078] env[68285]: DEBUG nova.compute.manager [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Start destroying the instance on the hypervisor. 
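The repeated "Waiting for the task ... to complete" / "progress is N%" pairs above come from oslo.vmware polling the vCenter task object until it reaches a terminal state. A rough sketch of that polling loop, for orientation only; oslo.vmware's real implementation runs through its own looping-call machinery and exception types.

    import time

    def wait_for_task(get_task_info, interval=0.5):
        # get_task_info() is assumed to return an object with .state,
        # .progress and .error, mirroring the vSphere TaskInfo fields.
        while True:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError("task failed: %s" % info.error)
            time.sleep(interval)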
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 861.808445] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 861.809521] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829a2377-1abc-41de-a364-c75faba0f050 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.817989] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.818302] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c468119-e798-4748-8fcf-7d47f02c3a13 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.825964] env[68285]: DEBUG oslo_vmware.api [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 861.825964] env[68285]: value = "task-2891300" [ 861.825964] env[68285]: _type = "Task" [ 861.825964] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.836541] env[68285]: DEBUG oslo_vmware.api [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891300, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.902136] env[68285]: DEBUG nova.network.neutron [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 862.022264] env[68285]: DEBUG nova.compute.manager [req-0fecae99-cc0f-4ef4-82d4-46e1478450aa req-63aaa6e3-9e23-4e67-b401-cf88f20bcb5a service nova] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Received event network-vif-plugged-1f09289d-57ed-49a6-a446-81bdfee585f3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 862.022488] env[68285]: DEBUG oslo_concurrency.lockutils [req-0fecae99-cc0f-4ef4-82d4-46e1478450aa req-63aaa6e3-9e23-4e67-b401-cf88f20bcb5a service nova] Acquiring lock "437a18da-8fe4-478e-82a0-3b1a9da47df8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.022690] env[68285]: DEBUG oslo_concurrency.lockutils [req-0fecae99-cc0f-4ef4-82d4-46e1478450aa req-63aaa6e3-9e23-4e67-b401-cf88f20bcb5a service nova] Lock "437a18da-8fe4-478e-82a0-3b1a9da47df8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.022853] env[68285]: DEBUG oslo_concurrency.lockutils [req-0fecae99-cc0f-4ef4-82d4-46e1478450aa req-63aaa6e3-9e23-4e67-b401-cf88f20bcb5a service nova] Lock "437a18da-8fe4-478e-82a0-3b1a9da47df8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.023027] env[68285]: DEBUG nova.compute.manager [req-0fecae99-cc0f-4ef4-82d4-46e1478450aa req-63aaa6e3-9e23-4e67-b401-cf88f20bcb5a service nova] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] No waiting events found dispatching network-vif-plugged-1f09289d-57ed-49a6-a446-81bdfee585f3 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 862.023200] env[68285]: WARNING nova.compute.manager [req-0fecae99-cc0f-4ef4-82d4-46e1478450aa req-63aaa6e3-9e23-4e67-b401-cf88f20bcb5a service nova] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Received unexpected event network-vif-plugged-1f09289d-57ed-49a6-a446-81bdfee585f3 for instance with vm_state building and task_state spawning. [ 862.023354] env[68285]: DEBUG nova.compute.manager [req-0fecae99-cc0f-4ef4-82d4-46e1478450aa req-63aaa6e3-9e23-4e67-b401-cf88f20bcb5a service nova] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Received event network-changed-1f09289d-57ed-49a6-a446-81bdfee585f3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 862.023501] env[68285]: DEBUG nova.compute.manager [req-0fecae99-cc0f-4ef4-82d4-46e1478450aa req-63aaa6e3-9e23-4e67-b401-cf88f20bcb5a service nova] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Refreshing instance network info cache due to event network-changed-1f09289d-57ed-49a6-a446-81bdfee585f3. 
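The "-events" lock and the "No waiting events found dispatching network-vif-plugged-..." warning above reflect the compute manager matching externally reported Neutron events against waiters registered per instance; when nothing is waiting (here the instance is still building), the event is logged and dropped. A simplified sketch of that dispatch pattern, illustrative rather than Nova's code:

    import threading

    class InstanceEvents(object):
        def __init__(self):
            self._waiters = {}     # (instance_uuid, event_name) -> Event
            self._lock = threading.Lock()

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop_and_dispatch(self, instance_uuid, event_name):
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                # Corresponds to the WARNING above: the event arrived while
                # the instance was still building and nobody was waiting.
                return False
            ev.set()
            return True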
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 862.023661] env[68285]: DEBUG oslo_concurrency.lockutils [req-0fecae99-cc0f-4ef4-82d4-46e1478450aa req-63aaa6e3-9e23-4e67-b401-cf88f20bcb5a service nova] Acquiring lock "refresh_cache-437a18da-8fe4-478e-82a0-3b1a9da47df8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.040042] env[68285]: DEBUG nova.scheduler.client.report [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 862.112089] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520525f2-896f-a99f-192a-cf264a9d6e08, 'name': SearchDatastore_Task, 'duration_secs': 0.047999} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.112398] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.112663] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] b0f32ce2-92fd-4290-a2f4-e5658f775f4f/b0f32ce2-92fd-4290-a2f4-e5658f775f4f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 862.112923] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9af87482-6884-4a8d-9259-c18c5c224a8e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.120579] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 862.120579] env[68285]: value = "task-2891301" [ 862.120579] env[68285]: _type = "Task" [ 862.120579] env[68285]: } to complete. 
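The "Inventory has not changed for provider ... based on inventory data: {...}" entries show the scheduler report client comparing the locally computed inventory against what the provider tree already holds and skipping the placement update when they match. A small sketch of that comparison; illustrative only, the real client also tracks provider generations.

    def inventory_changed(current, desired):
        # current/desired are dicts keyed by resource class, e.g.
        # {'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, ...}}
        return current != desired

    def set_inventory(provider, desired, push_to_placement):
        if not inventory_changed(provider.inventory, desired):
            return False          # nothing to send, as in the log above
        push_to_placement(provider.uuid, desired)   # hypothetical callable
        provider.inventory = desired
        return True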
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.124420] env[68285]: DEBUG nova.network.neutron [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Updating instance_info_cache with network_info: [{"id": "1f09289d-57ed-49a6-a446-81bdfee585f3", "address": "fa:16:3e:8e:11:b3", "network": {"id": "12f11a62-40d0-4668-a558-86bd6b08e0a8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2047022046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca04723be164bd6bc8759280a25797d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f09289d-57", "ovs_interfaceid": "1f09289d-57ed-49a6-a446-81bdfee585f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.133151] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891301, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.337210] env[68285]: DEBUG oslo_vmware.api [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891300, 'name': PowerOffVM_Task, 'duration_secs': 0.224083} completed successfully. 
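The instance_info_cache update above carries the complete VIF description: port id, MAC, fixed IP, subnet, MTU and OVS wiring details. A small helper like the following could pull out the fields that matter downstream; the structure is copied from the entry above, the helper itself is illustrative.

    def summarize_vif(vif):
        subnet = vif["network"]["subnets"][0]
        return {
            "port_id": vif["id"],
            "mac": vif["address"],
            "fixed_ip": subnet["ips"][0]["address"],
            "cidr": subnet["cidr"],
            "mtu": vif["network"]["meta"]["mtu"],
            "devname": vif["devname"],
            "ovs_interfaceid": vif["ovs_interfaceid"],
        }

    # For the entry above this yields fixed_ip 192.168.128.4,
    # cidr 192.168.128.0/28, mtu 8950, devname tap1f09289d-57.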
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.337735] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.337735] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.338085] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bd5da2f-0dda-4685-8bfd-f55b77aa2edd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.424926] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.425272] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.425496] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Deleting the datastore file [datastore2] fe9a8a13-73ec-4556-a62c-cc49fd01f539 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.425767] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a98d666f-a4ca-4ad6-9bf9-1c2b06c9a13c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.436749] env[68285]: DEBUG oslo_vmware.api [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for the task: (returnval){ [ 862.436749] env[68285]: value = "task-2891303" [ 862.436749] env[68285]: _type = "Task" [ 862.436749] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.445485] env[68285]: DEBUG oslo_vmware.api [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891303, 'name': DeleteDatastoreFile_Task} progress is 0%. 
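The terminate path for instance fe9a8a13-... follows a fixed ordering in these entries: power off the VM, unregister it from vCenter, then delete its directory on the datastore. A compressed sketch of that ordering; the method names are illustrative stand-ins for the logged vm_util/ds_util calls.

    def destroy_instance(vm, datastore_dir, wait):
        # wait() stands in for wait_for_task; each step maps to a log entry.
        wait(vm.power_off())          # PowerOffVM_Task
        vm.unregister()               # VirtualMachine.UnregisterVM (no task)
        wait(datastore_dir.delete())  # DeleteDatastoreFile_Task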
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.545382] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.545919] env[68285]: DEBUG nova.compute.manager [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 862.548606] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.456s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.550785] env[68285]: INFO nova.compute.claims [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.628888] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Releasing lock "refresh_cache-437a18da-8fe4-478e-82a0-3b1a9da47df8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.629246] env[68285]: DEBUG nova.compute.manager [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Instance network_info: |[{"id": "1f09289d-57ed-49a6-a446-81bdfee585f3", "address": "fa:16:3e:8e:11:b3", "network": {"id": "12f11a62-40d0-4668-a558-86bd6b08e0a8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2047022046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca04723be164bd6bc8759280a25797d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f09289d-57", "ovs_interfaceid": "1f09289d-57ed-49a6-a446-81bdfee585f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 862.632882] env[68285]: DEBUG oslo_concurrency.lockutils [req-0fecae99-cc0f-4ef4-82d4-46e1478450aa req-63aaa6e3-9e23-4e67-b401-cf88f20bcb5a service nova] Acquired lock "refresh_cache-437a18da-8fe4-478e-82a0-3b1a9da47df8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.633090] env[68285]: DEBUG nova.network.neutron [req-0fecae99-cc0f-4ef4-82d4-46e1478450aa req-63aaa6e3-9e23-4e67-b401-cf88f20bcb5a service nova] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Refreshing network info cache for port 1f09289d-57ed-49a6-a446-81bdfee585f3 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 862.634298] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:11:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7bcd9d2d-25c8-41ad-9a4a-93b9029ba993', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f09289d-57ed-49a6-a446-81bdfee585f3', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 862.641740] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 862.641952] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891301, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.645450] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 862.645914] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-19484f37-318b-4f9e-93a9-31e74f85b2bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.667920] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 862.667920] env[68285]: value = "task-2891304" [ 862.667920] env[68285]: _type = "Task" [ 862.667920] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.677522] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891304, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.924721] env[68285]: DEBUG nova.network.neutron [req-0fecae99-cc0f-4ef4-82d4-46e1478450aa req-63aaa6e3-9e23-4e67-b401-cf88f20bcb5a service nova] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Updated VIF entry in instance network info cache for port 1f09289d-57ed-49a6-a446-81bdfee585f3. 
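The "Instance VIF info" entry above shows the per-port dict the VMware driver assembles before CreateVM_Task: the backing network reference (here an NSX opaque network), the MAC address, the Neutron port id and the virtual NIC model. A sketch of building that structure from a network_info entry; the values follow the log, the function itself is illustrative.

    def vif_info_from_network_info(vif):
        return {
            "network_name": vif["network"]["bridge"],   # 'br-int'
            "mac_address": vif["address"],              # 'fa:16:3e:8e:11:b3'
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": vif["details"]["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": "vmxnet3",
        }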
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 862.925074] env[68285]: DEBUG nova.network.neutron [req-0fecae99-cc0f-4ef4-82d4-46e1478450aa req-63aaa6e3-9e23-4e67-b401-cf88f20bcb5a service nova] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Updating instance_info_cache with network_info: [{"id": "1f09289d-57ed-49a6-a446-81bdfee585f3", "address": "fa:16:3e:8e:11:b3", "network": {"id": "12f11a62-40d0-4668-a558-86bd6b08e0a8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2047022046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca04723be164bd6bc8759280a25797d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f09289d-57", "ovs_interfaceid": "1f09289d-57ed-49a6-a446-81bdfee585f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.949524] env[68285]: DEBUG oslo_vmware.api [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891303, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.054713] env[68285]: DEBUG nova.compute.utils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 863.058248] env[68285]: DEBUG nova.compute.manager [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 863.058475] env[68285]: DEBUG nova.network.neutron [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 863.099033] env[68285]: DEBUG nova.policy [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '08b855c4d4af42cba6a5dfeb2f9dcd11', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ca04723be164bd6bc8759280a25797d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 863.133507] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891301, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.787794} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.133757] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] b0f32ce2-92fd-4290-a2f4-e5658f775f4f/b0f32ce2-92fd-4290-a2f4-e5658f775f4f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 863.133969] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 863.134229] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06462291-7a15-4357-b334-b6419bb28539 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.141574] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 863.141574] env[68285]: value = "task-2891305" [ 863.141574] env[68285]: _type = "Task" [ 863.141574] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.152060] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891305, 'name': ExtendVirtualDisk_Task} progress is 0%. 
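The "Policy check for network:attach_external_network failed ..." entry is the expected outcome for a non-admin tenant: the rule is evaluated against the request credentials without raising, and allocation simply proceeds without the external-network privilege. A minimal sketch of such a check with oslo.policy; the rule name and credentials are taken from the log, while the default check string here is an assumption for illustration only.

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(policy.RuleDefault(
        "network:attach_external_network", "role:admin",
        description="Attach a port on an external network"))

    creds = {"project_id": "1ca04723be164bd6bc8759280a25797d",
             "roles": ["member", "reader"], "is_admin": False}
    allowed = enforcer.enforce("network:attach_external_network",
                               {}, creds, do_raise=False)
    # allowed evaluates to False here, matching the DEBUG line above.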
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.178949] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891304, 'name': CreateVM_Task, 'duration_secs': 0.414612} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.183515] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 863.184151] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.184390] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.184700] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 863.184973] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39921629-4750-4129-8846-81f816afac09 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.192335] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 863.192335] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]522ef3ce-b69f-96f0-ad01-1a822ddda36f" [ 863.192335] env[68285]: _type = "Task" [ 863.192335] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.202498] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522ef3ce-b69f-96f0-ad01-1a822ddda36f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.428019] env[68285]: DEBUG oslo_concurrency.lockutils [req-0fecae99-cc0f-4ef4-82d4-46e1478450aa req-63aaa6e3-9e23-4e67-b401-cf88f20bcb5a service nova] Releasing lock "refresh_cache-437a18da-8fe4-478e-82a0-3b1a9da47df8" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.449332] env[68285]: DEBUG oslo_vmware.api [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891303, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.497641] env[68285]: DEBUG nova.network.neutron [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Successfully created port: 5d280454-d5ca-4bfd-b516-72294e207a35 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 863.558756] env[68285]: DEBUG nova.compute.manager [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 863.657932] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891305, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077443} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.657932] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 863.657932] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e675fc-f843-4093-889a-e665463f77b1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.685218] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] b0f32ce2-92fd-4290-a2f4-e5658f775f4f/b0f32ce2-92fd-4290-a2f4-e5658f775f4f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 863.688060] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b14eb30-ff99-4fb8-b5be-c640a45d06b0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.713700] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522ef3ce-b69f-96f0-ad01-1a822ddda36f, 'name': SearchDatastore_Task, 'duration_secs': 0.018599} completed successfully. 
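For instance b0f32ce2-... the spawn path runs a fixed chain of vCenter tasks that can be traced through these entries: copy the cached VMDK into the instance directory, extend the root disk, reconfigure the VM to attach it, then (further below) rename the VM and power it on. A compressed sketch of that ordering; the session methods are hypothetical wrappers around the tasks named in the log.

    def spawn_root_disk(session, wait):
        # Paths and the extend size follow the log above; every session.*
        # call here is an assumed wrapper, not a real oslo.vmware API.
        cache = ("[datastore2] devstack-image-cache_base/"
                 "ce84ab4c-9913-42dc-b839-714ad2184867/"
                 "ce84ab4c-9913-42dc-b839-714ad2184867.vmdk")
        root = ("[datastore2] b0f32ce2-92fd-4290-a2f4-e5658f775f4f/"
                "b0f32ce2-92fd-4290-a2f4-e5658f775f4f.vmdk")
        wait(session.copy_virtual_disk(cache, root))      # CopyVirtualDisk_Task
        wait(session.extend_virtual_disk(root, 1048576))  # ExtendVirtualDisk_Task
        wait(session.reconfig_vm_attach_disk(root))       # ReconfigVM_Task
        wait(session.rename_vm())                         # Rename_Task
        wait(session.power_on_vm())                       # PowerOnVM_Task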
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.714946] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.715234] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 863.715478] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.715624] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.715800] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 863.716125] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 863.716125] env[68285]: value = "task-2891306" [ 863.716125] env[68285]: _type = "Task" [ 863.716125] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.718767] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5afed1cd-d232-417a-ba34-ffb223f7f56c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.730396] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891306, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.737492] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 863.737688] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 863.738502] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edb79764-05bf-4927-bb57-6cb4c5591407 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.744542] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 863.744542] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d8a757-6dc5-fad1-23b2-82a6ca9b8f83" [ 863.744542] env[68285]: _type = "Task" [ 863.744542] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.756456] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d8a757-6dc5-fad1-23b2-82a6ca9b8f83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.951214] env[68285]: DEBUG oslo_vmware.api [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Task: {'id': task-2891303, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.175386} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.951480] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 863.951662] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 863.951836] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 863.952032] env[68285]: INFO nova.compute.manager [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Took 2.14 seconds to destroy the instance on the hypervisor. [ 863.952270] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 863.952457] env[68285]: DEBUG nova.compute.manager [-] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 863.952556] env[68285]: DEBUG nova.network.neutron [-] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 864.101637] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed67fd0d-93ec-4d5e-8ff2-cba1e53b3ae0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.111347] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d637e16b-7c84-4939-80c7-d5709e2f0b20 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.145220] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c2026f-186e-42d3-b6f3-5d318b6e94a3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.153277] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962270b5-fec5-430e-9ccc-1fd22cb94cd3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.167573] env[68285]: DEBUG nova.compute.provider_tree [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.233753] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891306, 'name': ReconfigVM_Task, 'duration_secs': 0.325058} completed successfully. 
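The "Waiting for function ..._deallocate_network_with_retries to return" line wraps the Neutron deallocation in a retry loop so a transient Neutron failure during teardown does not leak ports. A bare-bones sketch of that retry idea; illustrative only, Nova drives this through oslo.service's looping-call helpers rather than a plain loop.

    import time

    def deallocate_with_retries(deallocate, attempts=3, delay=2):
        # deallocate() stands in for deallocate_for_instance() in the log.
        for attempt in range(1, attempts + 1):
            try:
                return deallocate()
            except Exception:
                if attempt == attempts:
                    raise
                time.sleep(delay)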
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.236179] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Reconfigured VM instance instance-00000021 to attach disk [datastore2] b0f32ce2-92fd-4290-a2f4-e5658f775f4f/b0f32ce2-92fd-4290-a2f4-e5658f775f4f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 864.236708] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-48eee58c-f6ac-4d28-9774-035c43859e26 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.244311] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 864.244311] env[68285]: value = "task-2891307" [ 864.244311] env[68285]: _type = "Task" [ 864.244311] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.257546] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d8a757-6dc5-fad1-23b2-82a6ca9b8f83, 'name': SearchDatastore_Task, 'duration_secs': 0.019978} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.262587] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891307, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.263167] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80670b5c-34d4-454b-b7fc-07554680e9f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.269643] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 864.269643] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d7ec2d-f3b6-96be-a250-9d4614c27d60" [ 864.269643] env[68285]: _type = "Task" [ 864.269643] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.284778] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d7ec2d-f3b6-96be-a250-9d4614c27d60, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.284778] env[68285]: DEBUG nova.compute.manager [req-49203444-12c3-4c4a-9991-744c8c8023e8 req-9cd110c2-65ab-4e71-a211-9a048906fb5d service nova] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Received event network-vif-deleted-a9adbb18-e996-4b1b-af89-73aa9fe32c71 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 864.285185] env[68285]: INFO nova.compute.manager [req-49203444-12c3-4c4a-9991-744c8c8023e8 req-9cd110c2-65ab-4e71-a211-9a048906fb5d service nova] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Neutron deleted interface a9adbb18-e996-4b1b-af89-73aa9fe32c71; detaching it from the instance and deleting it from the info cache [ 864.285185] env[68285]: DEBUG nova.network.neutron [req-49203444-12c3-4c4a-9991-744c8c8023e8 req-9cd110c2-65ab-4e71-a211-9a048906fb5d service nova] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.571639] env[68285]: DEBUG nova.compute.manager [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 864.597623] env[68285]: DEBUG nova.virt.hardware [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 864.597875] env[68285]: DEBUG nova.virt.hardware [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.598043] env[68285]: DEBUG nova.virt.hardware [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 864.598246] env[68285]: DEBUG nova.virt.hardware [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.598383] 
env[68285]: DEBUG nova.virt.hardware [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 864.598527] env[68285]: DEBUG nova.virt.hardware [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 864.598731] env[68285]: DEBUG nova.virt.hardware [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 864.598887] env[68285]: DEBUG nova.virt.hardware [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 864.599069] env[68285]: DEBUG nova.virt.hardware [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 864.599240] env[68285]: DEBUG nova.virt.hardware [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 864.599414] env[68285]: DEBUG nova.virt.hardware [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 864.600317] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3568c5ac-ac9d-474b-8500-10c5a70e4c2c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.609246] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd22bd47-b3b7-4cd2-8cc9-5c83610b91cc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.671401] env[68285]: DEBUG nova.scheduler.client.report [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 864.756198] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891307, 'name': Rename_Task, 'duration_secs': 0.171833} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.756476] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 864.756714] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ddcc4a7-be25-48b4-ad39-93c5db9e986c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.763548] env[68285]: DEBUG nova.network.neutron [-] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.765696] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 864.765696] env[68285]: value = "task-2891308" [ 864.765696] env[68285]: _type = "Task" [ 864.765696] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.778186] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891308, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.784809] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d7ec2d-f3b6-96be-a250-9d4614c27d60, 'name': SearchDatastore_Task, 'duration_secs': 0.010954} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.785195] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 864.785465] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 437a18da-8fe4-478e-82a0-3b1a9da47df8/437a18da-8fe4-478e-82a0-3b1a9da47df8.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 864.785725] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38b503be-0fdd-46e7-b7bd-a60e7fa3199b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.788587] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e3f67045-95d2-4378-a97d-02c4f96ebe4c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.800492] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99736059-de08-43d4-b098-987590b51251 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.814079] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 864.814079] env[68285]: value = "task-2891309" [ 864.814079] env[68285]: _type = "Task" [ 864.814079] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.824103] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891309, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.841419] env[68285]: DEBUG nova.compute.manager [req-49203444-12c3-4c4a-9991-744c8c8023e8 req-9cd110c2-65ab-4e71-a211-9a048906fb5d service nova] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Detach interface failed, port_id=a9adbb18-e996-4b1b-af89-73aa9fe32c71, reason: Instance fe9a8a13-73ec-4556-a62c-cc49fd01f539 could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 865.065154] env[68285]: DEBUG nova.network.neutron [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Successfully updated port: 5d280454-d5ca-4bfd-b516-72294e207a35 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.176356] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.628s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.177056] env[68285]: DEBUG nova.compute.manager [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 865.180544] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.991s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.180869] env[68285]: DEBUG nova.objects.instance [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lazy-loading 'resources' on Instance uuid 95f5e902-6385-4602-8458-7d7b2069a9da {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 865.266967] env[68285]: INFO nova.compute.manager [-] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Took 1.31 seconds to deallocate network for instance. [ 865.282366] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891308, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.326037] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490635} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.326307] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 437a18da-8fe4-478e-82a0-3b1a9da47df8/437a18da-8fe4-478e-82a0-3b1a9da47df8.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 865.327041] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 865.327041] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9de670a-0262-4d30-ba6b-7de76a56f328 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.337755] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 865.337755] env[68285]: value = "task-2891310" [ 865.337755] env[68285]: _type = "Task" [ 865.337755] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.346036] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891310, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.567475] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "refresh_cache-bda5b2fb-1875-4078-a4c1-f76f6abeaaf5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.567633] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquired lock "refresh_cache-bda5b2fb-1875-4078-a4c1-f76f6abeaaf5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.567807] env[68285]: DEBUG nova.network.neutron [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.684814] env[68285]: DEBUG nova.compute.utils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 865.689489] env[68285]: DEBUG nova.compute.manager [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 865.689658] env[68285]: DEBUG nova.network.neutron [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 865.739216] env[68285]: DEBUG nova.policy [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '407484812afe4dbc8dce6dbd62791e90', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd031b28813554f658818a43c1cf566bc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 865.777649] env[68285]: DEBUG oslo_concurrency.lockutils [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.777972] env[68285]: DEBUG oslo_vmware.api [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891308, 'name': PowerOnVM_Task, 'duration_secs': 0.687596} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.780613] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 865.780837] env[68285]: INFO nova.compute.manager [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Took 8.55 seconds to spawn the instance on the hypervisor. 
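The PowerOnVM_Task entries above follow oslo.vmware's generic task-polling pattern: the driver starts an asynchronous vSphere task through the shared VMwareAPISession, then blocks in wait_for_task, whose _poll_task callback produces the "progress is 0%" / "progress is 88%" lines until the task reaches a terminal state. A minimal sketch of that pattern, assuming placeholder vCenter credentials and an already-resolved VM managed-object reference (vm_ref):

# Sketch only: the host, credentials and vm_ref below are placeholders;
# error handling and config loading are omitted.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc1.example.test',              # vCenter host (placeholder)
    'administrator@vsphere.local',   # user (placeholder)
    'secret',                        # password (placeholder)
    api_retry_count=10,
    task_poll_interval=0.5)          # how often the pending task is re-polled

vm_ref = ...  # assumed: a VirtualMachine managed-object reference obtained elsewhere

# Start the asynchronous vSphere task; invoke_api returns the task moref.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task() polls the task (the "progress is N%" log lines above)
# and returns its result once it completes, raising if the task fails.
task_info = session.wait_for_task(task)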
[ 865.781041] env[68285]: DEBUG nova.compute.manager [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 865.782929] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a1c86a-7557-4278-ad34-6ea25f236f7f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.850024] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891310, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098871} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.850186] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.850812] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f8d3e4a-9c09-4ac8-a917-266b977b7b54 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.873545] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] 437a18da-8fe4-478e-82a0-3b1a9da47df8/437a18da-8fe4-478e-82a0-3b1a9da47df8.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.876017] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa0dfa04-1fa3-47b0-907e-f28666bd810a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.898125] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 865.898125] env[68285]: value = "task-2891311" [ 865.898125] env[68285]: _type = "Task" [ 865.898125] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.910375] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891311, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.112791] env[68285]: DEBUG nova.network.neutron [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.193019] env[68285]: DEBUG nova.compute.manager [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 866.217021] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f264ea84-b2ae-4c36-acbd-f41723430d61 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.225579] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd77a51-b315-4850-b8df-53d055b48eac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.260652] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a018cf3-b94d-4860-87f6-cf3ec0534b05 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.269756] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f28c5a-74b0-4e4a-bdcf-ebd35b8485fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.287804] env[68285]: DEBUG nova.compute.provider_tree [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.304786] env[68285]: INFO nova.compute.manager [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Took 38.79 seconds to build instance. 
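The "Inventory has not changed for provider ..." entries repeat the resource-provider inventory that the scheduler report client syncs to Placement. For each resource class, the capacity Placement schedules against is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A small sketch that works those numbers out from the inventory dict shown above:

# Effective capacity per resource class for provider
# 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, using the inventory from the log.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 161},
}

for rc, inv in inventory.items():
    # Placement allocates from (total - reserved) * allocation_ratio;
    # max_unit limits how much a single allocation may request.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:.0f}, max per allocation={inv['max_unit']}")

# VCPU:      capacity=192,    max per allocation=16
# MEMORY_MB: capacity=196078, max per allocation=65530
# DISK_GB:   capacity=400,    max per allocation=161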
[ 866.311692] env[68285]: DEBUG nova.compute.manager [req-d2a8ae0c-b3e0-45c9-9e1b-2127c6a3ad34 req-7273b5af-4480-4bc1-91f1-1d5cb8e30f05 service nova] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Received event network-vif-plugged-5d280454-d5ca-4bfd-b516-72294e207a35 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 866.311847] env[68285]: DEBUG oslo_concurrency.lockutils [req-d2a8ae0c-b3e0-45c9-9e1b-2127c6a3ad34 req-7273b5af-4480-4bc1-91f1-1d5cb8e30f05 service nova] Acquiring lock "bda5b2fb-1875-4078-a4c1-f76f6abeaaf5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.312058] env[68285]: DEBUG oslo_concurrency.lockutils [req-d2a8ae0c-b3e0-45c9-9e1b-2127c6a3ad34 req-7273b5af-4480-4bc1-91f1-1d5cb8e30f05 service nova] Lock "bda5b2fb-1875-4078-a4c1-f76f6abeaaf5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.312226] env[68285]: DEBUG oslo_concurrency.lockutils [req-d2a8ae0c-b3e0-45c9-9e1b-2127c6a3ad34 req-7273b5af-4480-4bc1-91f1-1d5cb8e30f05 service nova] Lock "bda5b2fb-1875-4078-a4c1-f76f6abeaaf5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.312391] env[68285]: DEBUG nova.compute.manager [req-d2a8ae0c-b3e0-45c9-9e1b-2127c6a3ad34 req-7273b5af-4480-4bc1-91f1-1d5cb8e30f05 service nova] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] No waiting events found dispatching network-vif-plugged-5d280454-d5ca-4bfd-b516-72294e207a35 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 866.312542] env[68285]: WARNING nova.compute.manager [req-d2a8ae0c-b3e0-45c9-9e1b-2127c6a3ad34 req-7273b5af-4480-4bc1-91f1-1d5cb8e30f05 service nova] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Received unexpected event network-vif-plugged-5d280454-d5ca-4bfd-b516-72294e207a35 for instance with vm_state building and task_state spawning. [ 866.315016] env[68285]: DEBUG nova.compute.manager [req-d2a8ae0c-b3e0-45c9-9e1b-2127c6a3ad34 req-7273b5af-4480-4bc1-91f1-1d5cb8e30f05 service nova] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Received event network-changed-5d280454-d5ca-4bfd-b516-72294e207a35 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 866.315016] env[68285]: DEBUG nova.compute.manager [req-d2a8ae0c-b3e0-45c9-9e1b-2127c6a3ad34 req-7273b5af-4480-4bc1-91f1-1d5cb8e30f05 service nova] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Refreshing instance network info cache due to event network-changed-5d280454-d5ca-4bfd-b516-72294e207a35. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 866.315016] env[68285]: DEBUG oslo_concurrency.lockutils [req-d2a8ae0c-b3e0-45c9-9e1b-2127c6a3ad34 req-7273b5af-4480-4bc1-91f1-1d5cb8e30f05 service nova] Acquiring lock "refresh_cache-bda5b2fb-1875-4078-a4c1-f76f6abeaaf5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.358175] env[68285]: DEBUG nova.network.neutron [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Successfully created port: b5a66605-68ad-4258-bc3e-7132c919268e {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 866.403230] env[68285]: DEBUG nova.network.neutron [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Updating instance_info_cache with network_info: [{"id": "5d280454-d5ca-4bfd-b516-72294e207a35", "address": "fa:16:3e:67:51:bb", "network": {"id": "12f11a62-40d0-4668-a558-86bd6b08e0a8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2047022046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca04723be164bd6bc8759280a25797d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d280454-d5", "ovs_interfaceid": "5d280454-d5ca-4bfd-b516-72294e207a35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.411879] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891311, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.790420] env[68285]: DEBUG nova.scheduler.client.report [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 866.807294] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e50b3faa-09a6-4867-89ad-18615ef933c3 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.783s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.908971] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891311, 'name': ReconfigVM_Task, 'duration_secs': 0.822685} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.909281] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Reconfigured VM instance instance-00000022 to attach disk [datastore2] 437a18da-8fe4-478e-82a0-3b1a9da47df8/437a18da-8fe4-478e-82a0-3b1a9da47df8.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 866.909922] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f450376-e317-4997-bae5-75f305623d7b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.913118] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Releasing lock "refresh_cache-bda5b2fb-1875-4078-a4c1-f76f6abeaaf5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.913398] env[68285]: DEBUG nova.compute.manager [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Instance network_info: |[{"id": "5d280454-d5ca-4bfd-b516-72294e207a35", "address": "fa:16:3e:67:51:bb", "network": {"id": "12f11a62-40d0-4668-a558-86bd6b08e0a8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2047022046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca04723be164bd6bc8759280a25797d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d280454-d5", "ovs_interfaceid": "5d280454-d5ca-4bfd-b516-72294e207a35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 866.913654] env[68285]: DEBUG oslo_concurrency.lockutils [req-d2a8ae0c-b3e0-45c9-9e1b-2127c6a3ad34 req-7273b5af-4480-4bc1-91f1-1d5cb8e30f05 service nova] Acquired lock "refresh_cache-bda5b2fb-1875-4078-a4c1-f76f6abeaaf5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.913821] env[68285]: DEBUG nova.network.neutron [req-d2a8ae0c-b3e0-45c9-9e1b-2127c6a3ad34 req-7273b5af-4480-4bc1-91f1-1d5cb8e30f05 service nova] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Refreshing network info cache for port 5d280454-d5ca-4bfd-b516-72294e207a35 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 866.914942] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:51:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7bcd9d2d-25c8-41ad-9a4a-93b9029ba993', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d280454-d5ca-4bfd-b516-72294e207a35', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.922566] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 866.926746] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 866.927087] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 866.927087] env[68285]: value = "task-2891312" [ 866.927087] env[68285]: _type = "Task" [ 866.927087] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.927561] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c86b217-a4b9-49ec-8fcf-a09446b06865 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.952633] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891312, 'name': Rename_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.953927] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.953927] env[68285]: value = "task-2891313" [ 866.953927] env[68285]: _type = "Task" [ 866.953927] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.963368] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891313, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.202386] env[68285]: DEBUG nova.compute.manager [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 867.233088] env[68285]: DEBUG nova.virt.hardware [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 867.233347] env[68285]: DEBUG nova.virt.hardware [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 867.233502] env[68285]: DEBUG nova.virt.hardware [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 867.233699] env[68285]: DEBUG nova.virt.hardware [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Flavor 
pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 867.233855] env[68285]: DEBUG nova.virt.hardware [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 867.234039] env[68285]: DEBUG nova.virt.hardware [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 867.234263] env[68285]: DEBUG nova.virt.hardware [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 867.234420] env[68285]: DEBUG nova.virt.hardware [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 867.234729] env[68285]: DEBUG nova.virt.hardware [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 867.234729] env[68285]: DEBUG nova.virt.hardware [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 867.234895] env[68285]: DEBUG nova.virt.hardware [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 867.235840] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc9eb89-58a1-4e9f-8294-a92ced79d2c7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.245653] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c1a506-1438-4bb5-af23-e741b3b15117 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.251170] env[68285]: DEBUG nova.network.neutron [req-d2a8ae0c-b3e0-45c9-9e1b-2127c6a3ad34 req-7273b5af-4480-4bc1-91f1-1d5cb8e30f05 service nova] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Updated VIF entry in instance network info cache for port 5d280454-d5ca-4bfd-b516-72294e207a35. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 867.251531] env[68285]: DEBUG nova.network.neutron [req-d2a8ae0c-b3e0-45c9-9e1b-2127c6a3ad34 req-7273b5af-4480-4bc1-91f1-1d5cb8e30f05 service nova] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Updating instance_info_cache with network_info: [{"id": "5d280454-d5ca-4bfd-b516-72294e207a35", "address": "fa:16:3e:67:51:bb", "network": {"id": "12f11a62-40d0-4668-a558-86bd6b08e0a8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2047022046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca04723be164bd6bc8759280a25797d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d280454-d5", "ovs_interfaceid": "5d280454-d5ca-4bfd-b516-72294e207a35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.295583] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.115s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.298106] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.594s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.300029] env[68285]: INFO nova.compute.claims [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 867.309430] env[68285]: DEBUG nova.compute.manager [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 867.321198] env[68285]: INFO nova.scheduler.client.report [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Deleted allocations for instance 95f5e902-6385-4602-8458-7d7b2069a9da [ 867.458178] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891312, 'name': Rename_Task, 'duration_secs': 0.182387} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.461954] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 867.462251] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c08cb7a-19a9-4a69-94af-5534059fae7a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.470125] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891313, 'name': CreateVM_Task, 'duration_secs': 0.447486} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.471194] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 867.471508] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 867.471508] env[68285]: value = "task-2891314" [ 867.471508] env[68285]: _type = "Task" [ 867.471508] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.472127] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.472286] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.472595] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 867.472879] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89f326bf-ee7e-4d7c-ae19-88236427032c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.485090] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891314, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.485411] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 867.485411] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d6b658-ef2a-7946-ab6a-c5a48d5b557d" [ 867.485411] env[68285]: _type = "Task" [ 867.485411] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.493824] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d6b658-ef2a-7946-ab6a-c5a48d5b557d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.753997] env[68285]: DEBUG oslo_concurrency.lockutils [req-d2a8ae0c-b3e0-45c9-9e1b-2127c6a3ad34 req-7273b5af-4480-4bc1-91f1-1d5cb8e30f05 service nova] Releasing lock "refresh_cache-bda5b2fb-1875-4078-a4c1-f76f6abeaaf5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.828620] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2220d4fc-89d1-4230-a94d-ddf0e054501b tempest-DeleteServersAdminTestJSON-11817193 tempest-DeleteServersAdminTestJSON-11817193-project-member] Lock "95f5e902-6385-4602-8458-7d7b2069a9da" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.114s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.832853] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.985280] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891314, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.997973] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d6b658-ef2a-7946-ab6a-c5a48d5b557d, 'name': SearchDatastore_Task, 'duration_secs': 0.020108} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.998484] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.998893] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.003215] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.003215] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.003215] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.005221] env[68285]: DEBUG nova.network.neutron [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Successfully updated port: b5a66605-68ad-4258-bc3e-7132c919268e {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 868.008501] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec5429b7-09a9-4d80-aedb-a49236c254ec {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.019865] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.020599] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.021595] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82f91a4a-3c40-47ac-b9f9-dec15ac4886e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.028194] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 868.028194] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]522696de-ae50-c4cb-02d0-f3d24fd24aa0" [ 868.028194] env[68285]: _type = "Task" [ 868.028194] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.037106] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522696de-ae50-c4cb-02d0-f3d24fd24aa0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.345821] env[68285]: DEBUG nova.compute.manager [req-699181c3-9108-4781-8508-a0a9fbafac20 req-28c1d4a4-3a31-47b6-8341-171345b34eee service nova] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Received event network-vif-plugged-b5a66605-68ad-4258-bc3e-7132c919268e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 868.346106] env[68285]: DEBUG oslo_concurrency.lockutils [req-699181c3-9108-4781-8508-a0a9fbafac20 req-28c1d4a4-3a31-47b6-8341-171345b34eee service nova] Acquiring lock "d2c3e3eb-4b05-4e08-bd08-0f42560fcdba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.346336] env[68285]: DEBUG oslo_concurrency.lockutils [req-699181c3-9108-4781-8508-a0a9fbafac20 req-28c1d4a4-3a31-47b6-8341-171345b34eee service nova] Lock "d2c3e3eb-4b05-4e08-bd08-0f42560fcdba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.346510] env[68285]: DEBUG oslo_concurrency.lockutils [req-699181c3-9108-4781-8508-a0a9fbafac20 req-28c1d4a4-3a31-47b6-8341-171345b34eee service nova] Lock "d2c3e3eb-4b05-4e08-bd08-0f42560fcdba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.346677] env[68285]: DEBUG nova.compute.manager [req-699181c3-9108-4781-8508-a0a9fbafac20 req-28c1d4a4-3a31-47b6-8341-171345b34eee service nova] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] No waiting events found dispatching network-vif-plugged-b5a66605-68ad-4258-bc3e-7132c919268e {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 868.346840] env[68285]: WARNING nova.compute.manager [req-699181c3-9108-4781-8508-a0a9fbafac20 req-28c1d4a4-3a31-47b6-8341-171345b34eee service nova] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Received unexpected event network-vif-plugged-b5a66605-68ad-4258-bc3e-7132c919268e for instance with vm_state building and task_state spawning.
[ 868.346994] env[68285]: DEBUG nova.compute.manager [req-699181c3-9108-4781-8508-a0a9fbafac20 req-28c1d4a4-3a31-47b6-8341-171345b34eee service nova] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Received event network-changed-b5a66605-68ad-4258-bc3e-7132c919268e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 868.347205] env[68285]: DEBUG nova.compute.manager [req-699181c3-9108-4781-8508-a0a9fbafac20 req-28c1d4a4-3a31-47b6-8341-171345b34eee service nova] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Refreshing instance network info cache due to event network-changed-b5a66605-68ad-4258-bc3e-7132c919268e. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 868.347370] env[68285]: DEBUG oslo_concurrency.lockutils [req-699181c3-9108-4781-8508-a0a9fbafac20 req-28c1d4a4-3a31-47b6-8341-171345b34eee service nova] Acquiring lock "refresh_cache-d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.347538] env[68285]: DEBUG oslo_concurrency.lockutils [req-699181c3-9108-4781-8508-a0a9fbafac20 req-28c1d4a4-3a31-47b6-8341-171345b34eee service nova] Acquired lock "refresh_cache-d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.347666] env[68285]: DEBUG nova.network.neutron [req-699181c3-9108-4781-8508-a0a9fbafac20 req-28c1d4a4-3a31-47b6-8341-171345b34eee service nova] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Refreshing network info cache for port b5a66605-68ad-4258-bc3e-7132c919268e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 868.489072] env[68285]: DEBUG oslo_vmware.api [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891314, 'name': PowerOnVM_Task, 'duration_secs': 0.539223} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.489354] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 868.489559] env[68285]: INFO nova.compute.manager [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Took 8.58 seconds to spawn the instance on the hypervisor.
[ 868.489733] env[68285]: DEBUG nova.compute.manager [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 868.490524] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06455932-ea74-4eba-8beb-9b2007622e8e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.512278] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Acquiring lock "refresh_cache-d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.544214] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522696de-ae50-c4cb-02d0-f3d24fd24aa0, 'name': SearchDatastore_Task, 'duration_secs': 0.010906} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.545961] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2f6c4d6-453e-467a-a4ee-30f180c4eda3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.559114] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 868.559114] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b48bed-feca-82ad-379e-28eae5bafe43" [ 868.559114] env[68285]: _type = "Task" [ 868.559114] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.571211] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b48bed-feca-82ad-379e-28eae5bafe43, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.808080] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1f65c6-ad7a-4c6c-962f-a9465376448f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.816831] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32f13b3-437f-49a2-ad8e-6c965d332c80 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.847737] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb7e70b-dddf-4a70-999b-faa91d308b17 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.858131] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c618ac9-daa5-480f-8b1c-ac81ba238624 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.873961] env[68285]: DEBUG nova.compute.provider_tree [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 868.988628] env[68285]: DEBUG nova.network.neutron [req-699181c3-9108-4781-8508-a0a9fbafac20 req-28c1d4a4-3a31-47b6-8341-171345b34eee service nova] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 869.017341] env[68285]: INFO nova.compute.manager [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Took 38.32 seconds to build instance. [ 869.071062] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b48bed-feca-82ad-379e-28eae5bafe43, 'name': SearchDatastore_Task, 'duration_secs': 0.012051} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.071395] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.071591] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] bda5b2fb-1875-4078-a4c1-f76f6abeaaf5/bda5b2fb-1875-4078-a4c1-f76f6abeaaf5.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 869.071843] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5de7ded4-5600-46cd-ad96-1db2f8d3123b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.080528] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 869.080528] env[68285]: value = "task-2891315" [ 869.080528] env[68285]: _type = "Task" [ 869.080528] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.090383] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891315, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.138386] env[68285]: DEBUG nova.network.neutron [req-699181c3-9108-4781-8508-a0a9fbafac20 req-28c1d4a4-3a31-47b6-8341-171345b34eee service nova] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.377275] env[68285]: DEBUG nova.scheduler.client.report [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 869.519490] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6247447-12ae-4123-8881-217cb79761c7 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "437a18da-8fe4-478e-82a0-3b1a9da47df8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.925s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.593287] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891315, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.641509] env[68285]: DEBUG oslo_concurrency.lockutils [req-699181c3-9108-4781-8508-a0a9fbafac20 req-28c1d4a4-3a31-47b6-8341-171345b34eee service nova] Releasing lock "refresh_cache-d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.641509] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Acquired lock "refresh_cache-d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.641509] env[68285]: DEBUG nova.network.neutron [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 869.882769] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.584s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.883309] env[68285]: DEBUG nova.compute.manager [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 869.885723] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.517s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.887094] env[68285]: INFO nova.compute.claims [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 870.023612] env[68285]: DEBUG nova.compute.manager [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 870.095108] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891315, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572122} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.095108] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] bda5b2fb-1875-4078-a4c1-f76f6abeaaf5/bda5b2fb-1875-4078-a4c1-f76f6abeaaf5.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.095108] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.095108] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04af6016-45e6-4759-9a6e-f0751d5d9e8d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.102051] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 870.102051] env[68285]: value = "task-2891316" [ 870.102051] env[68285]: _type = "Task" [ 870.102051] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.110838] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891316, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.177232] env[68285]: DEBUG nova.network.neutron [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 870.393767] env[68285]: DEBUG nova.compute.utils [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 870.397066] env[68285]: DEBUG nova.compute.manager [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Not allocating networking since 'none' was specified. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 870.401438] env[68285]: DEBUG nova.network.neutron [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Updating instance_info_cache with network_info: [{"id": "b5a66605-68ad-4258-bc3e-7132c919268e", "address": "fa:16:3e:ac:f5:f6", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a66605-68", "ovs_interfaceid": "b5a66605-68ad-4258-bc3e-7132c919268e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.547423] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.614397] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891316, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071962} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.614656] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 870.615526] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d555b5e-654c-4c71-88f3-ad12c79138c7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.638264] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] bda5b2fb-1875-4078-a4c1-f76f6abeaaf5/bda5b2fb-1875-4078-a4c1-f76f6abeaaf5.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 870.638578] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53250a81-825b-47e9-9d15-8882a091ed5d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.660581] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 870.660581] env[68285]: value = "task-2891317" [ 870.660581] env[68285]: _type = "Task" [ 870.660581] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.669691] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891317, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.899222] env[68285]: DEBUG nova.compute.manager [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 870.904345] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Releasing lock "refresh_cache-d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.904611] env[68285]: DEBUG nova.compute.manager [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Instance network_info: |[{"id": "b5a66605-68ad-4258-bc3e-7132c919268e", "address": "fa:16:3e:ac:f5:f6", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a66605-68", "ovs_interfaceid": "b5a66605-68ad-4258-bc3e-7132c919268e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 870.904989] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:f5:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5a66605-68ad-4258-bc3e-7132c919268e', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 870.912534] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Creating folder: Project (d031b28813554f658818a43c1cf566bc). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 870.912758] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-869f2628-01ad-4edf-9bcf-4f76a6ef8a18 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.926609] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Created folder: Project (d031b28813554f658818a43c1cf566bc) in parent group-v580775. 
[ 870.926868] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Creating folder: Instances. Parent ref: group-v580867. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 870.929691] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b15c74e2-ecbe-4900-a1ad-14c7736d5513 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.941685] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Created folder: Instances in parent group-v580867. [ 870.941939] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 870.944627] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 870.945058] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-368db97c-dfd6-4f01-b2e0-45c8faada4cd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.970464] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 870.970464] env[68285]: value = "task-2891320" [ 870.970464] env[68285]: _type = "Task" [ 870.970464] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.982491] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891320, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.172883] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891317, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.416161] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d854fc-99a7-4b90-a0f4-283f2a126343 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.423783] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c235559-5e3d-4b0a-bd5b-269c731eb88c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.460493] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e562044f-6005-46f0-b1c3-d155a95ee748 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.470418] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae96c51-d189-403a-9320-d7f0aafadd6d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.483901] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891320, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.492546] env[68285]: DEBUG nova.compute.provider_tree [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.672298] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891317, 'name': ReconfigVM_Task, 'duration_secs': 0.838252} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.672536] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Reconfigured VM instance instance-00000023 to attach disk [datastore1] bda5b2fb-1875-4078-a4c1-f76f6abeaaf5/bda5b2fb-1875-4078-a4c1-f76f6abeaaf5.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.673204] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f88cad0f-acfc-4689-a1ad-a0cd609ff5b8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.681702] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 871.681702] env[68285]: value = "task-2891321" [ 871.681702] env[68285]: _type = "Task" [ 871.681702] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.692618] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891321, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.908838] env[68285]: DEBUG nova.compute.manager [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 871.934381] env[68285]: DEBUG nova.virt.hardware [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 871.934745] env[68285]: DEBUG nova.virt.hardware [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 871.934930] env[68285]: DEBUG nova.virt.hardware [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 871.935160] env[68285]: DEBUG nova.virt.hardware [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 871.935424] env[68285]: DEBUG nova.virt.hardware [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 871.935599] env[68285]: DEBUG nova.virt.hardware [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}}
[ 871.935814] env[68285]: DEBUG nova.virt.hardware [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 871.935973] env[68285]: DEBUG nova.virt.hardware [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 871.936155] env[68285]: DEBUG nova.virt.hardware [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 871.936318] env[68285]: DEBUG nova.virt.hardware [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 871.936486] env[68285]: DEBUG nova.virt.hardware [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 871.937404] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396412a3-9a78-469d-8392-bf0cd8cd15e9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.946629] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0ff1bb-30ae-426f-adec-f653a2187ef1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.960899] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 871.967020] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Creating folder: Project (c9b5a79eddbc495d9cf06733fd00f8bb). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 871.967356] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae92a240-a05e-4c57-9f7f-1fc9618521ed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.979914] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Created folder: Project (c9b5a79eddbc495d9cf06733fd00f8bb) in parent group-v580775.
[ 871.980148] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Creating folder: Instances. Parent ref: group-v580870. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 871.981303] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8857bef7-ccea-4c09-a48c-7d49b58191a3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.986417] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891320, 'name': CreateVM_Task, 'duration_secs': 0.723672} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.986569] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 871.987249] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.987451] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.987818] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 871.988033] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5b35662-e9ff-4246-ad3f-73f5ace936f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.991106] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Created folder: Instances in parent group-v580870. [ 871.991328] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 871.991511] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 871.991703] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b99656f0-f2a0-44c7-bf12-633e7dba9d63 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.005986] env[68285]: DEBUG nova.scheduler.client.report [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 872.009352] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Waiting for the task: (returnval){ [ 872.009352] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52459109-daa5-06d0-82f7-4dc599703885" [ 872.009352] env[68285]: _type = "Task" [ 872.009352] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.016137] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 872.016137] env[68285]: value = "task-2891324" [ 872.016137] env[68285]: _type = "Task" [ 872.016137] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.022599] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52459109-daa5-06d0-82f7-4dc599703885, 'name': SearchDatastore_Task, 'duration_secs': 0.020742} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.027658] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.027904] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 872.028146] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.028319] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.028506] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 872.029361] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0901b187-a8bd-45ab-ad2d-6a2e7095cc2f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.037384] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891324, 'name': CreateVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.039497] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 872.039697] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 872.040407] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f0dc26c-da5b-4686-958d-c504f3b7dd4c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.046511] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Waiting for the task: (returnval){ [ 872.046511] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c21ebc-2877-06de-d955-a56cad9f7b70" [ 872.046511] env[68285]: _type = "Task" [ 872.046511] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.055987] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c21ebc-2877-06de-d955-a56cad9f7b70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.192347] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891321, 'name': Rename_Task, 'duration_secs': 0.270786} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.192617] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.192849] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5f8f3be-96ac-42cf-bc1d-75b5d29fd3ce {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.200209] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 872.200209] env[68285]: value = "task-2891325" [ 872.200209] env[68285]: _type = "Task" [ 872.200209] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.208964] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891325, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.512366] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.626s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.512754] env[68285]: DEBUG nova.compute.manager [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 872.517311] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.637s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.517311] env[68285]: DEBUG nova.objects.instance [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lazy-loading 'resources' on Instance uuid 8bedba57-e7c8-4fa8-b171-f6d74550a31c {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 872.532843] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891324, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.559191] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c21ebc-2877-06de-d955-a56cad9f7b70, 'name': SearchDatastore_Task, 'duration_secs': 0.014244} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.560619] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70e45b51-80e8-4526-96bc-b8a18451a5b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.567710] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Waiting for the task: (returnval){ [ 872.567710] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f36dfb-caf3-165d-9796-79c87d145d12" [ 872.567710] env[68285]: _type = "Task" [ 872.567710] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.577249] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f36dfb-caf3-165d-9796-79c87d145d12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.715208] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891325, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.019786] env[68285]: DEBUG nova.compute.utils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 873.026019] env[68285]: DEBUG nova.compute.manager [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 873.026019] env[68285]: DEBUG nova.network.neutron [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 873.039940] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891324, 'name': CreateVM_Task, 'duration_secs': 0.529679} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.039940] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 873.039940] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.039940] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.039940] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 873.040266] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-297dcb8f-f8d1-4db1-825c-ced85639df8b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.047067] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 873.047067] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527c1864-e1d3-6796-76c1-b3a3a685f14d" [ 873.047067] env[68285]: _type = "Task" [ 873.047067] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.059324] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527c1864-e1d3-6796-76c1-b3a3a685f14d, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.059613] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.059942] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 873.060345] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.074020] env[68285]: DEBUG nova.policy [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11e000558c8a44878a90add053bc4a70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c212f8fe09c041209a51099ad3af16d5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 873.082526] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f36dfb-caf3-165d-9796-79c87d145d12, 'name': SearchDatastore_Task, 'duration_secs': 0.0269} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.082718] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.082992] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] d2c3e3eb-4b05-4e08-bd08-0f42560fcdba/d2c3e3eb-4b05-4e08-bd08-0f42560fcdba.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 873.083254] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.083444] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 873.083652] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15764b90-60a6-4776-af16-19fa94a38ca6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.085636] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9b630fc-5a82-4318-bdf1-e9b6f910d981 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.096477] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Waiting for the task: (returnval){ [ 873.096477] env[68285]: value = "task-2891326" [ 873.096477] env[68285]: _type = "Task" [ 873.096477] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.102692] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 873.102692] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 873.104641] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8d17935-2ebd-4da3-bc10-03d1c3379217 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.114405] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891326, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.116630] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 873.116630] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5263ed6b-756f-5106-02d2-9377ff3c1592" [ 873.116630] env[68285]: _type = "Task" [ 873.116630] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.133244] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5263ed6b-756f-5106-02d2-9377ff3c1592, 'name': SearchDatastore_Task} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.133795] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2c4dc73-b1e3-42a4-bd19-97332c5017d8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.140220] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 873.140220] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52a16a26-295f-bf34-a1e1-df4340d58e8a" [ 873.140220] env[68285]: _type = "Task" [ 873.140220] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.151091] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a16a26-295f-bf34-a1e1-df4340d58e8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.213088] env[68285]: DEBUG oslo_vmware.api [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891325, 'name': PowerOnVM_Task, 'duration_secs': 0.812387} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.213415] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.213623] env[68285]: INFO nova.compute.manager [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Took 8.64 seconds to spawn the instance on the hypervisor. [ 873.213798] env[68285]: DEBUG nova.compute.manager [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 873.217981] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1577704d-31d6-41f4-b027-80553e987855 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.418414] env[68285]: DEBUG nova.network.neutron [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Successfully created port: 474e30d6-abd2-42ca-a4e9-42f115b28cad {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 873.525858] env[68285]: DEBUG nova.compute.manager [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 873.617974] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891326, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.654841] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a16a26-295f-bf34-a1e1-df4340d58e8a, 'name': SearchDatastore_Task, 'duration_secs': 0.010661} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.655120] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.655429] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 29981c10-c6dd-4852-94ad-1f8f0135b8cc/29981c10-c6dd-4852-94ad-1f8f0135b8cc.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 873.655715] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7ddac05-4845-4134-b794-ccced10dc32e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.666125] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 873.666125] env[68285]: value = "task-2891327" [ 873.666125] env[68285]: _type = "Task" [ 873.666125] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.675093] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891327, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.679216] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a68559-137e-49f0-bbd2-6ee5c7974b3e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.689371] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895ac91e-1aa9-403b-97e2-8bfd69363840 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.723975] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c0f2b5-256b-47f4-8389-421f0b050998 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.741293] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278352bb-707b-455c-a844-7744fb1c30ec {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.747528] env[68285]: INFO nova.compute.manager [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Took 40.03 seconds to build instance. 
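The entries above trace the image-handling path end to end: SearchDatastore_Task locates the cached image VMDK, the image-cache lock is released, and CopyVirtualDisk_Task clones the disk into the instance directory while oslo.vmware polls the task until it reports success. Below is a minimal sketch of that copy-and-poll pattern using only oslo.vmware's public session API; it is not the actual nova.virt.vmwareapi code, and the host, credentials, poll interval and dc_ref are illustrative placeholders rather than values from this deployment.

    # Minimal sketch (assumption: not the real Nova vm_util implementation) of the
    # copy-and-poll pattern visible above: CopyVirtualDisk_Task is started via the
    # vCenter VirtualDiskManager, and oslo.vmware then polls it, producing the
    # "Task: {...} progress is N%" and "completed successfully" DEBUG lines.
    from oslo_vmware import api as vmware_api

    # Placeholder endpoint and credentials, not this deployment's vCenter.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user@example.org', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def copy_disk(session, dc_ref, source_path, dest_path):
        """Clone a VMDK and block until the vCenter task finishes."""
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', vdm,
            sourceName=source_path, sourceDatacenter=dc_ref,
            destName=dest_path, destDatacenter=dc_ref)
        # wait_for_task() re-reads the task's progress on each poll interval,
        # logging the percentage, and returns the task info once the task
        # succeeds (it raises if the task errors or is cancelled).
        return session.wait_for_task(task)

    # Usage, mirroring the datastore paths in the log (dc_ref is assumed to be
    # an already-resolved Datacenter managed-object reference):
    # copy_disk(session, dc_ref,
    #           '[datastore1] devstack-image-cache_base/<image-uuid>.vmdk',
    #           '[datastore1] <instance-uuid>/<instance-uuid>.vmdk')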
[ 873.758921] env[68285]: DEBUG nova.compute.provider_tree [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.112526] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891326, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.177716] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891327, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.250350] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb4730e2-5b73-4820-b324-fefb3629d06c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "bda5b2fb-1875-4078-a4c1-f76f6abeaaf5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.764s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.263364] env[68285]: DEBUG nova.scheduler.client.report [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 874.539615] env[68285]: DEBUG nova.compute.manager [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 874.573756] env[68285]: DEBUG nova.virt.hardware [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 874.574095] env[68285]: DEBUG nova.virt.hardware [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 874.574727] env[68285]: DEBUG nova.virt.hardware [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 874.574727] env[68285]: DEBUG nova.virt.hardware [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 874.574727] env[68285]: DEBUG nova.virt.hardware [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 874.574931] env[68285]: DEBUG nova.virt.hardware [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 874.575238] env[68285]: DEBUG nova.virt.hardware [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 874.575456] env[68285]: DEBUG nova.virt.hardware [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 874.575682] env[68285]: DEBUG nova.virt.hardware [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 874.575902] env[68285]: DEBUG nova.virt.hardware [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 874.576159] env[68285]: DEBUG nova.virt.hardware [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 874.577382] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f3a06d-0241-4b62-9d7e-bb8aa7d3ee56 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.589461] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbb902d-9890-43db-a5c8-59f178c77fb1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.622596] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891326, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.515046} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.622906] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] d2c3e3eb-4b05-4e08-bd08-0f42560fcdba/d2c3e3eb-4b05-4e08-bd08-0f42560fcdba.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 874.623204] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 874.623531] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-375a650f-c2d0-4b45-bc66-a72004296d56 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.632593] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Waiting for the task: (returnval){ [ 874.632593] env[68285]: value = "task-2891328" [ 874.632593] env[68285]: _type = "Task" [ 874.632593] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.647704] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891328, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.679041] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891327, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.753230] env[68285]: DEBUG nova.compute.manager [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 874.768996] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.253s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.775385] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.791s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.775385] env[68285]: DEBUG nova.objects.instance [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lazy-loading 'resources' on Instance uuid 7dca07f4-78aa-45e4-954a-c9f4d58e7c84 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 874.837579] env[68285]: INFO nova.scheduler.client.report [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Deleted allocations for instance 8bedba57-e7c8-4fa8-b171-f6d74550a31c [ 875.144901] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891328, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106943} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.145329] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 875.146288] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14087959-3128-4a38-ac93-e3463232652d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.174691] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] d2c3e3eb-4b05-4e08-bd08-0f42560fcdba/d2c3e3eb-4b05-4e08-bd08-0f42560fcdba.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.175243] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19eed751-89f1-4ea8-bb7b-3be0b48d88b5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.201034] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.201450] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.206689] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891327, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.395431} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.208294] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 29981c10-c6dd-4852-94ad-1f8f0135b8cc/29981c10-c6dd-4852-94ad-1f8f0135b8cc.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 875.208595] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 875.208955] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Waiting for the task: (returnval){ [ 875.208955] env[68285]: value = "task-2891329" [ 875.208955] env[68285]: _type = "Task" [ 875.208955] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.209258] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c3b2f08-7da9-430a-a815-b98d1e36df3d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.221956] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891329, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.224363] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 875.224363] env[68285]: value = "task-2891330" [ 875.224363] env[68285]: _type = "Task" [ 875.224363] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.234057] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891330, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.280197] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.313534] env[68285]: DEBUG nova.network.neutron [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Successfully updated port: 474e30d6-abd2-42ca-a4e9-42f115b28cad {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 875.352435] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6373650-1dcf-405d-97a4-dbd861999671 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.758s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.353444] env[68285]: DEBUG oslo_concurrency.lockutils [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 32.023s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.353687] env[68285]: DEBUG oslo_concurrency.lockutils [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.354170] env[68285]: DEBUG oslo_concurrency.lockutils [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.354170] env[68285]: DEBUG oslo_concurrency.lockutils [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.355998] env[68285]: INFO nova.compute.manager [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 
8bedba57-e7c8-4fa8-b171-f6d74550a31c] Terminating instance [ 875.711096] env[68285]: DEBUG nova.compute.utils [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 875.723381] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891329, 'name': ReconfigVM_Task, 'duration_secs': 0.349506} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.726055] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Reconfigured VM instance instance-00000024 to attach disk [datastore1] d2c3e3eb-4b05-4e08-bd08-0f42560fcdba/d2c3e3eb-4b05-4e08-bd08-0f42560fcdba.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 875.729167] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c8fa6c8-9833-4674-b561-28483ffabb0b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.745429] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891330, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076712} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.745717] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Waiting for the task: (returnval){ [ 875.745717] env[68285]: value = "task-2891331" [ 875.745717] env[68285]: _type = "Task" [ 875.745717] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.745942] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 875.746974] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f709369-5299-4277-8833-86cfe504ad53 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.775704] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 29981c10-c6dd-4852-94ad-1f8f0135b8cc/29981c10-c6dd-4852-94ad-1f8f0135b8cc.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.779588] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b7ac819-86c9-4d89-b2a3-9c39c6cbec3e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.801375] env[68285]: DEBUG nova.compute.manager [req-c07cf0a1-26e5-4d0c-b8d9-0fcc3b89bc6f req-d33b58c8-6a85-4c78-bddf-732a378ce1f6 service nova] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Received event network-vif-plugged-474e30d6-abd2-42ca-a4e9-42f115b28cad {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 875.801624] env[68285]: DEBUG oslo_concurrency.lockutils [req-c07cf0a1-26e5-4d0c-b8d9-0fcc3b89bc6f req-d33b58c8-6a85-4c78-bddf-732a378ce1f6 service nova] Acquiring lock "324cc3e5-1c81-498e-b520-e9fca26013ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.801871] env[68285]: DEBUG oslo_concurrency.lockutils [req-c07cf0a1-26e5-4d0c-b8d9-0fcc3b89bc6f req-d33b58c8-6a85-4c78-bddf-732a378ce1f6 service nova] Lock "324cc3e5-1c81-498e-b520-e9fca26013ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.802061] env[68285]: DEBUG oslo_concurrency.lockutils [req-c07cf0a1-26e5-4d0c-b8d9-0fcc3b89bc6f req-d33b58c8-6a85-4c78-bddf-732a378ce1f6 service nova] Lock "324cc3e5-1c81-498e-b520-e9fca26013ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.802263] env[68285]: DEBUG nova.compute.manager [req-c07cf0a1-26e5-4d0c-b8d9-0fcc3b89bc6f req-d33b58c8-6a85-4c78-bddf-732a378ce1f6 service nova] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] No waiting events found dispatching network-vif-plugged-474e30d6-abd2-42ca-a4e9-42f115b28cad {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 875.802458] env[68285]: WARNING nova.compute.manager [req-c07cf0a1-26e5-4d0c-b8d9-0fcc3b89bc6f 
req-d33b58c8-6a85-4c78-bddf-732a378ce1f6 service nova] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Received unexpected event network-vif-plugged-474e30d6-abd2-42ca-a4e9-42f115b28cad for instance with vm_state building and task_state spawning. [ 875.811953] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 875.811953] env[68285]: value = "task-2891332" [ 875.811953] env[68285]: _type = "Task" [ 875.811953] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.815770] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "refresh_cache-324cc3e5-1c81-498e-b520-e9fca26013ef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.815874] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquired lock "refresh_cache-324cc3e5-1c81-498e-b520-e9fca26013ef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.816151] env[68285]: DEBUG nova.network.neutron [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.828026] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891332, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.830214] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dfbb906-9746-4c2a-b4f2-6cd785d05399 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.838553] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29248ec-a144-4289-9887-d630fa795e04 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.873073] env[68285]: DEBUG oslo_concurrency.lockutils [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.873294] env[68285]: DEBUG oslo_concurrency.lockutils [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquired lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.873489] env[68285]: DEBUG nova.network.neutron [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.875839] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f52dbcf-ee9c-4798-bc39-2d460a2e459c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.885972] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282ab1bc-9320-445c-ae78-f0fe2d0f8619 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.902821] env[68285]: DEBUG nova.compute.provider_tree [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.217416] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.016s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.257390] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891331, 'name': Rename_Task, 'duration_secs': 0.212416} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.257674] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 876.257920] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e42614c6-3a60-420f-8adf-31d142de328d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.265599] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Waiting for the task: (returnval){ [ 876.265599] env[68285]: value = "task-2891333" [ 876.265599] env[68285]: _type = "Task" [ 876.265599] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.274007] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891333, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.336200] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891332, 'name': ReconfigVM_Task, 'duration_secs': 0.280833} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.336574] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 29981c10-c6dd-4852-94ad-1f8f0135b8cc/29981c10-c6dd-4852-94ad-1f8f0135b8cc.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.337424] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b3f48748-efa4-4d94-b5f5-f8c8e6d363c9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.347102] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 876.347102] env[68285]: value = "task-2891334" [ 876.347102] env[68285]: _type = "Task" [ 876.347102] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.356508] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891334, 'name': Rename_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.364196] env[68285]: DEBUG nova.network.neutron [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 876.376122] env[68285]: DEBUG nova.compute.utils [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Can not refresh info_cache because instance was not found {{(pid=68285) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 876.404991] env[68285]: DEBUG nova.scheduler.client.report [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 876.638725] env[68285]: DEBUG nova.network.neutron [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 876.780237] env[68285]: DEBUG oslo_vmware.api [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891333, 'name': PowerOnVM_Task, 'duration_secs': 0.487951} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.780572] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 876.780790] env[68285]: INFO nova.compute.manager [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Took 9.58 seconds to spawn the instance on the hypervisor. 
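Interleaved with the VM tasks, the resource tracker serializes its claims and usage updates on a single in-process lock, which is what produces the recurring 'Acquiring lock "compute_resources" ...', '... acquired ... waited N.NNNs' and '... "released" ... held N.NNNs' lines emitted from lockutils.py. A minimal sketch of that pattern follows, calling oslo.concurrency directly; Nova reaches lockutils through its own synchronized helper, so this is illustrative rather than the real ResourceTracker code.

    # Illustrative only: the decorator serializes callers on an in-process lock
    # named "compute_resources"; lockutils logs the acquire (with time spent
    # waiting) and the release (with time the lock was held) at DEBUG level,
    # matching the resource-tracker lines throughout this log.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance):
        # Runs with the lock held; long-running work here shows up directly as
        # the "waited N.NNNs" value of whichever caller queues up behind it.
        ...

    # The same lock can also be taken explicitly as a context manager:
    with lockutils.lock('compute_resources'):
        pass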
[ 876.780998] env[68285]: DEBUG nova.compute.manager [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 876.782059] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041ca2a4-077f-4089-9fd0-67de3750fc18 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.794224] env[68285]: DEBUG nova.network.neutron [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.858850] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891334, 'name': Rename_Task, 'duration_secs': 0.312702} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.859870] env[68285]: DEBUG nova.network.neutron [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Updating instance_info_cache with network_info: [{"id": "474e30d6-abd2-42ca-a4e9-42f115b28cad", "address": "fa:16:3e:ec:11:2d", "network": {"id": "29140596-472d-439e-878e-bfff12ffdf03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-909517823-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c212f8fe09c041209a51099ad3af16d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b89fd3b-0470-40c9-bb5b-d52c76c030e4", "external-id": "nsx-vlan-transportzone-276", "segmentation_id": 276, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap474e30d6-ab", "ovs_interfaceid": "474e30d6-abd2-42ca-a4e9-42f115b28cad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.861117] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 876.861780] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edd257c7-ebd5-44fc-8a4d-c9cd582a9ee8 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.869952] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 876.869952] env[68285]: value = "task-2891335" [ 876.869952] env[68285]: _type = "Task" [ 876.869952] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.880064] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891335, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.910354] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.137s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.913376] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.398s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.914686] env[68285]: DEBUG nova.objects.instance [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Lazy-loading 'resources' on Instance uuid 65f289bb-6e97-47ad-8531-c06a9cce302f {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 876.939123] env[68285]: INFO nova.scheduler.client.report [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleted allocations for instance 7dca07f4-78aa-45e4-954a-c9f4d58e7c84 [ 877.300852] env[68285]: DEBUG oslo_concurrency.lockutils [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Releasing lock "refresh_cache-8bedba57-e7c8-4fa8-b171-f6d74550a31c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.301156] env[68285]: DEBUG nova.compute.manager [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 877.301353] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 877.301949] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.302325] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.302465] env[68285]: INFO nova.compute.manager [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Attaching volume 446502c1-41d3-42eb-aded-5e5732e3748e to /dev/sdb [ 877.308902] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5a98c27-1afb-401a-a7d9-9c3846d84955 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.323646] env[68285]: INFO nova.compute.manager [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Took 39.26 seconds to build instance. 
[ 877.334221] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9aad2f5-4b60-43eb-b8cd-366b3fadbfc1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.374226] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Releasing lock "refresh_cache-324cc3e5-1c81-498e-b520-e9fca26013ef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.374558] env[68285]: DEBUG nova.compute.manager [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Instance network_info: |[{"id": "474e30d6-abd2-42ca-a4e9-42f115b28cad", "address": "fa:16:3e:ec:11:2d", "network": {"id": "29140596-472d-439e-878e-bfff12ffdf03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-909517823-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c212f8fe09c041209a51099ad3af16d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b89fd3b-0470-40c9-bb5b-d52c76c030e4", "external-id": "nsx-vlan-transportzone-276", "segmentation_id": 276, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap474e30d6-ab", "ovs_interfaceid": "474e30d6-abd2-42ca-a4e9-42f115b28cad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 877.375385] env[68285]: WARNING nova.virt.vmwareapi.vmops [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8bedba57-e7c8-4fa8-b171-f6d74550a31c could not be found. [ 877.375628] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 877.375819] env[68285]: INFO nova.compute.manager [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Took 0.07 seconds to destroy the instance on the hypervisor. 
[ 877.376151] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 877.378552] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:11:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b89fd3b-0470-40c9-bb5b-d52c76c030e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '474e30d6-abd2-42ca-a4e9-42f115b28cad', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 877.385725] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Creating folder: Project (c212f8fe09c041209a51099ad3af16d5). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 877.388851] env[68285]: DEBUG nova.compute.manager [-] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 877.388952] env[68285]: DEBUG nova.network.neutron [-] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 877.391270] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f6991e-2486-4cc6-8bd1-b407dc08ec91 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.396213] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7fd8e816-ad8e-4acc-9444-1aef98db2898 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.407693] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1857d4-eb13-410b-ab28-b4e0c56ddc0f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.410680] env[68285]: DEBUG oslo_vmware.api [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891335, 'name': PowerOnVM_Task, 'duration_secs': 0.476962} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.411585] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 877.411844] env[68285]: INFO nova.compute.manager [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Took 5.50 seconds to spawn the instance on the hypervisor. [ 877.412059] env[68285]: DEBUG nova.compute.manager [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 877.414597] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a107e56-a9e2-49eb-92e1-f2d01a403786 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.417395] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Created folder: Project (c212f8fe09c041209a51099ad3af16d5) in parent group-v580775. [ 877.417600] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Creating folder: Instances. Parent ref: group-v580873. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 877.420173] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7446ef8-87d2-4830-9370-ca7d2e27ad99 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.427066] env[68285]: DEBUG nova.virt.block_device [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Updating existing volume attachment record: fde9de4d-026e-4d49-8291-fe38595093e8 {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 877.430977] env[68285]: DEBUG nova.network.neutron [-] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.441018] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Created folder: Instances in parent group-v580873. [ 877.441018] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 877.441018] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 877.441018] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c24ba45f-260d-4296-b094-40bdbc79a7ed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.460826] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99d0caa4-856a-452b-bba9-38ba50e33868 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "7dca07f4-78aa-45e4-954a-c9f4d58e7c84" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.141s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.467070] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 877.467070] env[68285]: value = "task-2891338" [ 877.467070] env[68285]: _type = "Task" [ 877.467070] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.485682] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891338, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.827745] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd7e9122-30fc-4ecf-bfbe-11320660b37c tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Lock "d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.391s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.899418] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "11de7da5-1d73-4536-b2a1-f7dbbdec14b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.900322] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "11de7da5-1d73-4536-b2a1-f7dbbdec14b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.900322] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "11de7da5-1d73-4536-b2a1-f7dbbdec14b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.900476] env[68285]: DEBUG oslo_concurrency.lockutils 
[None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "11de7da5-1d73-4536-b2a1-f7dbbdec14b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.901048] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "11de7da5-1d73-4536-b2a1-f7dbbdec14b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.905025] env[68285]: INFO nova.compute.manager [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Terminating instance [ 877.938983] env[68285]: DEBUG nova.compute.manager [None req-aa9a161b-8c2f-4e1d-b574-1efd388f5cf5 tempest-ServerExternalEventsTest-1934348475 tempest-ServerExternalEventsTest-1934348475-project] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Received event network-changed {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 877.938983] env[68285]: DEBUG nova.compute.manager [None req-aa9a161b-8c2f-4e1d-b574-1efd388f5cf5 tempest-ServerExternalEventsTest-1934348475 tempest-ServerExternalEventsTest-1934348475-project] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Refreshing instance network info cache due to event network-changed. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 877.939221] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aa9a161b-8c2f-4e1d-b574-1efd388f5cf5 tempest-ServerExternalEventsTest-1934348475 tempest-ServerExternalEventsTest-1934348475-project] Acquiring lock "refresh_cache-d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.939263] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aa9a161b-8c2f-4e1d-b574-1efd388f5cf5 tempest-ServerExternalEventsTest-1934348475 tempest-ServerExternalEventsTest-1934348475-project] Acquired lock "refresh_cache-d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.939574] env[68285]: DEBUG nova.network.neutron [None req-aa9a161b-8c2f-4e1d-b574-1efd388f5cf5 tempest-ServerExternalEventsTest-1934348475 tempest-ServerExternalEventsTest-1934348475-project] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 877.945731] env[68285]: DEBUG nova.network.neutron [-] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.959722] env[68285]: INFO nova.compute.manager [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Took 37.28 seconds to build instance. [ 877.975216] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8b28ef-280f-4c65-bfa4-3dc4248a4484 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.986122] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891338, 'name': CreateVM_Task, 'duration_secs': 0.50699} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.988147] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 877.989207] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.989402] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.989724] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 877.990721] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e1e095-f7b0-475e-9d72-ff10ff94910e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.994288] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-248e6c87-0935-4618-a89e-13a0f21de05a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.000077] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 878.000077] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52348eef-8635-a45e-c2ab-60f63b7e9541" [ 878.000077] env[68285]: _type = "Task" [ 878.000077] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.042121] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff04f81-a3d6-482f-a33d-436335328fcd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.053598] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8886820f-8713-4973-b0ba-292f1b6d69bd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.061022] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52348eef-8635-a45e-c2ab-60f63b7e9541, 'name': SearchDatastore_Task, 'duration_secs': 0.014772} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.061022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.061022] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 878.061022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.061022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.061022] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 878.061022] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-968afb47-2dd6-4430-8599-d5c3e404c520 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.068875] env[68285]: 
DEBUG nova.compute.provider_tree [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.079363] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 878.079591] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 878.080394] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf09510f-c863-459d-bb9f-6dc036432840 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.087239] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 878.087239] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52908e3f-966b-6b1c-c5df-5db54aea62aa" [ 878.087239] env[68285]: _type = "Task" [ 878.087239] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.095316] env[68285]: DEBUG nova.compute.manager [req-79fab5c7-b2ad-4563-bc23-8a6af3983b33 req-487d3d25-cb76-4b7a-8d3a-d28cf280e112 service nova] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Received event network-changed-474e30d6-abd2-42ca-a4e9-42f115b28cad {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 878.095316] env[68285]: DEBUG nova.compute.manager [req-79fab5c7-b2ad-4563-bc23-8a6af3983b33 req-487d3d25-cb76-4b7a-8d3a-d28cf280e112 service nova] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Refreshing instance network info cache due to event network-changed-474e30d6-abd2-42ca-a4e9-42f115b28cad. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 878.095316] env[68285]: DEBUG oslo_concurrency.lockutils [req-79fab5c7-b2ad-4563-bc23-8a6af3983b33 req-487d3d25-cb76-4b7a-8d3a-d28cf280e112 service nova] Acquiring lock "refresh_cache-324cc3e5-1c81-498e-b520-e9fca26013ef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.095316] env[68285]: DEBUG oslo_concurrency.lockutils [req-79fab5c7-b2ad-4563-bc23-8a6af3983b33 req-487d3d25-cb76-4b7a-8d3a-d28cf280e112 service nova] Acquired lock "refresh_cache-324cc3e5-1c81-498e-b520-e9fca26013ef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.095316] env[68285]: DEBUG nova.network.neutron [req-79fab5c7-b2ad-4563-bc23-8a6af3983b33 req-487d3d25-cb76-4b7a-8d3a-d28cf280e112 service nova] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Refreshing network info cache for port 474e30d6-abd2-42ca-a4e9-42f115b28cad {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 878.099016] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52908e3f-966b-6b1c-c5df-5db54aea62aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.329056] env[68285]: DEBUG nova.compute.manager [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 878.412194] env[68285]: DEBUG nova.compute.manager [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 878.412427] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 878.413316] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221de107-9472-46a2-8b80-33c5e8fed508 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.423262] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.423506] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b69495c0-95c8-496b-8cc8-8fb4396b4f00 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.430782] env[68285]: DEBUG oslo_vmware.api [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 878.430782] env[68285]: value = "task-2891343" [ 878.430782] env[68285]: _type = "Task" [ 878.430782] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.439867] env[68285]: DEBUG oslo_vmware.api [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891343, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.457048] env[68285]: INFO nova.compute.manager [-] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Took 1.07 seconds to deallocate network for instance. 
[ 878.461488] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a739eae3-9396-43c6-9519-a45032726fba tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Lock "29981c10-c6dd-4852-94ad-1f8f0135b8cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.161s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.572563] env[68285]: DEBUG nova.scheduler.client.report [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 878.610679] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52908e3f-966b-6b1c-c5df-5db54aea62aa, 'name': SearchDatastore_Task, 'duration_secs': 0.024925} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.612181] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8ff3a61-4c57-42b5-ab45-2d4399ae4fb3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.627565] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 878.627565] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d68626-8d23-5abc-dfbb-f71081e633cf" [ 878.627565] env[68285]: _type = "Task" [ 878.627565] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.644663] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d68626-8d23-5abc-dfbb-f71081e633cf, 'name': SearchDatastore_Task, 'duration_secs': 0.010549} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.646049] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.646680] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 324cc3e5-1c81-498e-b520-e9fca26013ef/324cc3e5-1c81-498e-b520-e9fca26013ef.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 878.647044] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11fd4051-f91f-4ba1-9be0-5bc485bfca9e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.656332] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 878.656332] env[68285]: value = "task-2891344" [ 878.656332] env[68285]: _type = "Task" [ 878.656332] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.667922] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891344, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.742298] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.742575] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.763427] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Acquiring lock "d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.763694] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Lock "d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.764146] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Acquiring lock "d2c3e3eb-4b05-4e08-bd08-0f42560fcdba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.764365] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Lock "d2c3e3eb-4b05-4e08-bd08-0f42560fcdba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.764557] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Lock "d2c3e3eb-4b05-4e08-bd08-0f42560fcdba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.766629] env[68285]: INFO nova.compute.manager [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 
tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Terminating instance [ 878.863176] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.943202] env[68285]: DEBUG oslo_vmware.api [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891343, 'name': PowerOffVM_Task, 'duration_secs': 0.200333} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.943919] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 878.944200] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 878.944492] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aafd3779-2df2-4398-9a91-5c0c188e27de {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.964870] env[68285]: INFO nova.compute.manager [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Instance disappeared during terminate [ 878.964870] env[68285]: DEBUG oslo_concurrency.lockutils [None req-213751d2-3342-425f-891e-8b98db36508d tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "8bedba57-e7c8-4fa8-b171-f6d74550a31c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.611s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.039570] env[68285]: DEBUG nova.network.neutron [None req-aa9a161b-8c2f-4e1d-b574-1efd388f5cf5 tempest-ServerExternalEventsTest-1934348475 tempest-ServerExternalEventsTest-1934348475-project] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Updating instance_info_cache with network_info: [{"id": "b5a66605-68ad-4258-bc3e-7132c919268e", "address": "fa:16:3e:ac:f5:f6", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": 
{"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a66605-68", "ovs_interfaceid": "b5a66605-68ad-4258-bc3e-7132c919268e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.043376] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 879.043663] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 879.043894] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Deleting the datastore file [datastore2] 11de7da5-1d73-4536-b2a1-f7dbbdec14b8 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.044590] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b46513f-d27e-43ef-8245-596de6f2d9c4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.054855] env[68285]: DEBUG oslo_vmware.api [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for the task: (returnval){ [ 879.054855] env[68285]: value = "task-2891346" [ 879.054855] env[68285]: _type = "Task" [ 879.054855] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.072728] env[68285]: DEBUG oslo_vmware.api [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891346, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.078890] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.165s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.083118] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.293s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.084472] env[68285]: INFO nova.compute.claims [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 879.100202] env[68285]: INFO nova.scheduler.client.report [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Deleted allocations for instance 65f289bb-6e97-47ad-8531-c06a9cce302f [ 879.125796] env[68285]: DEBUG nova.network.neutron [req-79fab5c7-b2ad-4563-bc23-8a6af3983b33 req-487d3d25-cb76-4b7a-8d3a-d28cf280e112 service nova] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Updated VIF entry in instance network info cache for port 474e30d6-abd2-42ca-a4e9-42f115b28cad. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 879.126392] env[68285]: DEBUG nova.network.neutron [req-79fab5c7-b2ad-4563-bc23-8a6af3983b33 req-487d3d25-cb76-4b7a-8d3a-d28cf280e112 service nova] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Updating instance_info_cache with network_info: [{"id": "474e30d6-abd2-42ca-a4e9-42f115b28cad", "address": "fa:16:3e:ec:11:2d", "network": {"id": "29140596-472d-439e-878e-bfff12ffdf03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-909517823-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c212f8fe09c041209a51099ad3af16d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b89fd3b-0470-40c9-bb5b-d52c76c030e4", "external-id": "nsx-vlan-transportzone-276", "segmentation_id": 276, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap474e30d6-ab", "ovs_interfaceid": "474e30d6-abd2-42ca-a4e9-42f115b28cad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.172048] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891344, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.247071] env[68285]: DEBUG nova.compute.manager [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 879.274813] env[68285]: DEBUG nova.compute.manager [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 879.275068] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 879.276011] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f182f530-9ccd-4c17-919e-03188e9d725e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.285196] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.285476] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f7d7bef-3af3-4fab-b139-1d50d3946de2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.293633] env[68285]: DEBUG oslo_vmware.api [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Waiting for the task: (returnval){ [ 879.293633] env[68285]: value = "task-2891347" [ 879.293633] env[68285]: _type = "Task" [ 879.293633] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.302863] env[68285]: DEBUG oslo_vmware.api [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891347, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.545534] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aa9a161b-8c2f-4e1d-b574-1efd388f5cf5 tempest-ServerExternalEventsTest-1934348475 tempest-ServerExternalEventsTest-1934348475-project] Releasing lock "refresh_cache-d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.568703] env[68285]: DEBUG oslo_vmware.api [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Task: {'id': task-2891346, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.296289} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.568913] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 879.569123] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 879.569313] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 879.569485] env[68285]: INFO nova.compute.manager [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Took 1.16 seconds to destroy the instance on the hypervisor. [ 879.569789] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 879.569953] env[68285]: DEBUG nova.compute.manager [-] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 879.570046] env[68285]: DEBUG nova.network.neutron [-] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 879.614873] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d154b863-2ff1-4c8c-add7-29dfd4388624 tempest-InstanceActionsNegativeTestJSON-573983198 tempest-InstanceActionsNegativeTestJSON-573983198-project-member] Lock "65f289bb-6e97-47ad-8531-c06a9cce302f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.834s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.629736] env[68285]: DEBUG oslo_concurrency.lockutils [req-79fab5c7-b2ad-4563-bc23-8a6af3983b33 req-487d3d25-cb76-4b7a-8d3a-d28cf280e112 service nova] Releasing lock "refresh_cache-324cc3e5-1c81-498e-b520-e9fca26013ef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.668459] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891344, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545577} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.668775] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 324cc3e5-1c81-498e-b520-e9fca26013ef/324cc3e5-1c81-498e-b520-e9fca26013ef.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 879.668998] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 879.669255] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-192b7a9e-e635-4c60-ad8a-4acf93d10826 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.676711] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 879.676711] env[68285]: value = "task-2891348" [ 879.676711] env[68285]: _type = "Task" [ 879.676711] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.685278] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891348, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.779017] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.804382] env[68285]: DEBUG oslo_vmware.api [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891347, 'name': PowerOffVM_Task, 'duration_secs': 0.198351} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.804695] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.804893] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 879.805202] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-354f043a-bbba-4d27-b39a-d18a54b5dfcb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.886127] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 879.886396] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 879.886587] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Deleting the datastore file [datastore1] d2c3e3eb-4b05-4e08-bd08-0f42560fcdba {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.886876] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-bd992420-f8ac-4af1-8ca2-5ed86ab128a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.897173] env[68285]: DEBUG oslo_vmware.api [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Waiting for the task: (returnval){ [ 879.897173] env[68285]: value = "task-2891350" [ 879.897173] env[68285]: _type = "Task" [ 879.897173] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.910125] env[68285]: DEBUG oslo_vmware.api [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891350, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.017767] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ddb308e7-c984-452d-aada-26030bd15e79 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.017767] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ddb308e7-c984-452d-aada-26030bd15e79 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.018009] env[68285]: DEBUG nova.compute.manager [None req-ddb308e7-c984-452d-aada-26030bd15e79 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 880.019975] env[68285]: INFO nova.compute.manager [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Rebuilding instance [ 880.021504] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ccfc1f-9762-4246-b7c5-7f0874adf992 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.029387] env[68285]: DEBUG nova.compute.manager [None req-ddb308e7-c984-452d-aada-26030bd15e79 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68285) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 880.029992] env[68285]: DEBUG nova.objects.instance [None req-ddb308e7-c984-452d-aada-26030bd15e79 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lazy-loading 'flavor' on Instance uuid b0f32ce2-92fd-4290-a2f4-e5658f775f4f {{(pid=68285) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 880.078840] env[68285]: DEBUG nova.compute.manager [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 880.079975] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679cb9a3-5298-4bee-b018-a5d871cfbae0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.189222] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891348, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114828} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.194162] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 880.194661] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85234cf4-fbda-43c5-8e73-0786c3d2c5ac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.221993] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 324cc3e5-1c81-498e-b520-e9fca26013ef/324cc3e5-1c81-498e-b520-e9fca26013ef.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 880.225427] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e34daaed-2384-441d-8989-209a2679c9a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.253078] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 880.253078] env[68285]: value = "task-2891352" [ 880.253078] env[68285]: _type = "Task" [ 880.253078] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.265877] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891352, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.332384] env[68285]: DEBUG nova.compute.manager [req-12b55edc-9135-45c7-8178-2cc1176cd015 req-10a3db32-2e07-4d51-b2cd-b94c95c65605 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Received event network-vif-deleted-3b795cd1-99e2-4a06-9607-e71ca33d19ff {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 880.332676] env[68285]: INFO nova.compute.manager [req-12b55edc-9135-45c7-8178-2cc1176cd015 req-10a3db32-2e07-4d51-b2cd-b94c95c65605 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Neutron deleted interface 3b795cd1-99e2-4a06-9607-e71ca33d19ff; detaching it from the instance and deleting it from the info cache [ 880.332939] env[68285]: DEBUG nova.network.neutron [req-12b55edc-9135-45c7-8178-2cc1176cd015 req-10a3db32-2e07-4d51-b2cd-b94c95c65605 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.408866] env[68285]: DEBUG oslo_vmware.api [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Task: {'id': task-2891350, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.501136} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.411672] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.411868] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.412087] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.412317] env[68285]: INFO nova.compute.manager [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Took 1.14 seconds to destroy the instance on the hypervisor. [ 880.412603] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 880.413310] env[68285]: DEBUG nova.compute.manager [-] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 880.413415] env[68285]: DEBUG nova.network.neutron [-] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.486905] env[68285]: DEBUG nova.network.neutron [-] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.703108] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987b00df-0d6f-4a5e-ab18-2dc334748d54 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.711847] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bec2f0-f52a-444e-88b7-595f4a212318 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.745529] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e41409-1bb3-4dfd-aefa-7ccb16650b41 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.753985] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c90afc9-8272-443d-b60a-445a56a502e1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.767638] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891352, 'name': ReconfigVM_Task, 'duration_secs': 0.326321} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.775457] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 324cc3e5-1c81-498e-b520-e9fca26013ef/324cc3e5-1c81-498e-b520-e9fca26013ef.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 880.776416] env[68285]: DEBUG nova.compute.provider_tree [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.781602] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-376acfa1-e2a9-4790-9a33-6d7a46d2b480 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.789200] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 880.789200] env[68285]: value = "task-2891353" [ 880.789200] env[68285]: _type = "Task" [ 880.789200] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.801931] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891353, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.839154] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fbd61204-b9ae-4c3d-a342-0386e8384297 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.850550] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02725145-3a88-49de-82b6-59aa478f485f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.889297] env[68285]: DEBUG nova.compute.manager [req-12b55edc-9135-45c7-8178-2cc1176cd015 req-10a3db32-2e07-4d51-b2cd-b94c95c65605 service nova] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Detach interface failed, port_id=3b795cd1-99e2-4a06-9607-e71ca33d19ff, reason: Instance 11de7da5-1d73-4536-b2a1-f7dbbdec14b8 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 880.992168] env[68285]: INFO nova.compute.manager [-] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Took 1.42 seconds to deallocate network for instance. 
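A pattern that recurs throughout this stretch of the log is the oslo.vmware task wait: the compute node issues a vCenter task (PowerOffVM_Task, DeleteDatastoreFile_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, ...), then wait_for_task/_poll_task repeatedly log "progress is N%" until the task is reported "completed successfully". The snippet below is only a minimal illustrative sketch of that polling shape, not the oslo.vmware implementation; the get_task_info callable, its state names, and the timeout handling are assumptions made for the example.

    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=60.0):
        """Poll a task until it reaches a terminal state and return its result.

        get_task_info is a caller-supplied callable (hypothetical here) that
        returns a dict such as {'state': 'running', 'progress': 77} or
        {'state': 'success', 'result': ...} or {'state': 'error', 'message': ...}.
        """
        deadline = time.monotonic() + timeout
        while True:
            info = get_task_info()
            state = info.get('state')
            if state == 'success':
                return info.get('result')
            if state == 'error':
                raise RuntimeError(info.get('message', 'task failed'))
            if time.monotonic() > deadline:
                raise TimeoutError('task did not complete in time')
            # Mirrors the "Task: {...} progress is N%" records in the log above.
            print("progress is %s%%" % info.get('progress', 0))
            time.sleep(poll_interval)

    # Example: a fake task that finishes after three polls.
    _calls = {'n': 0}
    def fake_task_info():
        _calls['n'] += 1
        if _calls['n'] < 3:
            return {'state': 'running', 'progress': _calls['n'] * 40}
        return {'state': 'success', 'result': 'task-complete'}

    print(wait_for_task(fake_task_info, poll_interval=0.01))

In the real driver the task state and progress come from the vSphere API rather than a local callable; the sketch only shows why each task in the log appears as a "Waiting for the task" record followed by one or more progress polls and a final completion record.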
[ 881.039926] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddb308e7-c984-452d-aada-26030bd15e79 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 881.040327] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3ad31aa-1877-41b7-b5b0-0c59617ddd21 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.055146] env[68285]: DEBUG oslo_vmware.api [None req-ddb308e7-c984-452d-aada-26030bd15e79 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 881.055146] env[68285]: value = "task-2891354" [ 881.055146] env[68285]: _type = "Task" [ 881.055146] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.068185] env[68285]: DEBUG oslo_vmware.api [None req-ddb308e7-c984-452d-aada-26030bd15e79 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891354, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.097651] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 881.097762] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2fd8c85c-0429-4b01-89ba-a5d16bcafe66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.106675] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 881.106675] env[68285]: value = "task-2891355" [ 881.106675] env[68285]: _type = "Task" [ 881.106675] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.120024] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891355, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.283683] env[68285]: DEBUG nova.network.neutron [-] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.285033] env[68285]: DEBUG nova.scheduler.client.report [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 881.300819] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891353, 'name': Rename_Task, 'duration_secs': 0.175133} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.301112] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 881.301364] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1277e1f0-c74f-4de0-a8f6-b186101dab97 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.312830] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 881.312830] env[68285]: value = "task-2891356" [ 881.312830] env[68285]: _type = "Task" [ 881.312830] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.322632] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891356, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.499210] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.567029] env[68285]: DEBUG oslo_vmware.api [None req-ddb308e7-c984-452d-aada-26030bd15e79 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891354, 'name': PowerOffVM_Task, 'duration_secs': 0.303639} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.567029] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddb308e7-c984-452d-aada-26030bd15e79 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 881.567029] env[68285]: DEBUG nova.compute.manager [None req-ddb308e7-c984-452d-aada-26030bd15e79 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 881.567029] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f771c3-76bf-417e-8793-f36df335f7df {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.618847] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891355, 'name': PowerOffVM_Task, 'duration_secs': 0.183305} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.619947] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 881.620379] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 881.621320] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c15ce6d-3e0d-4ef1-af3e-3d1824efe226 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.631021] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 881.631021] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d3da734-4955-4ae2-a5ae-91c5d30a9b38 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.663421] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 881.663421] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 881.663606] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Deleting the datastore file [datastore1] 29981c10-c6dd-4852-94ad-1f8f0135b8cc {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 881.664034] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3c24410-206c-4195-a983-fea62303123f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.672577] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 881.672577] env[68285]: value = "task-2891358" [ 881.672577] env[68285]: _type = "Task" [ 881.672577] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.681880] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891358, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.789230] env[68285]: INFO nova.compute.manager [-] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Took 1.38 seconds to deallocate network for instance. [ 881.790250] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.708s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.790955] env[68285]: DEBUG nova.compute.manager [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 881.795720] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.342s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.795981] env[68285]: DEBUG nova.objects.instance [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Lazy-loading 'resources' on Instance uuid c8784827-a928-439d-abdf-d82b62a61152 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.825389] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891356, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.084565] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ddb308e7-c984-452d-aada-26030bd15e79 tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.067s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.188562] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891358, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141866} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.188872] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 882.189249] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 882.189249] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 882.302020] env[68285]: DEBUG nova.compute.utils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 882.306649] env[68285]: DEBUG nova.compute.manager [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 882.307510] env[68285]: DEBUG nova.network.neutron [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 882.310525] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.328816] env[68285]: DEBUG oslo_vmware.api [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891356, 'name': PowerOnVM_Task, 'duration_secs': 0.532942} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.329186] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 882.329705] env[68285]: INFO nova.compute.manager [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Took 7.79 seconds to spawn the instance on the hypervisor. [ 882.330044] env[68285]: DEBUG nova.compute.manager [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 882.331156] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0511c39d-3b3c-4217-bbfe-6df2a652be03 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.421411] env[68285]: DEBUG nova.policy [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ab8a3422e384ef19be08ecb6ee6a1c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68e85847129e4ba38c7625ad1c4efc82', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 882.426207] env[68285]: DEBUG nova.compute.manager [req-f13b0359-671b-40de-ac46-fed1eb665f7b req-0d52cf60-0497-4ccd-9947-b6c555d949f5 service nova] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Received event network-vif-deleted-b5a66605-68ad-4258-bc3e-7132c919268e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 882.511465] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Volume attach. 
Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 882.511772] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580877', 'volume_id': '446502c1-41d3-42eb-aded-5e5732e3748e', 'name': 'volume-446502c1-41d3-42eb-aded-5e5732e3748e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '52fbfbe4-1807-4d6d-9139-ebe30e6bf647', 'attached_at': '', 'detached_at': '', 'volume_id': '446502c1-41d3-42eb-aded-5e5732e3748e', 'serial': '446502c1-41d3-42eb-aded-5e5732e3748e'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 882.512720] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5d4aa4-7c28-4b2b-80e2-15d1c764be7a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.539777] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69e7419-a1ac-4337-9291-84b8dbf1af0b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.572566] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] volume-446502c1-41d3-42eb-aded-5e5732e3748e/volume-446502c1-41d3-42eb-aded-5e5732e3748e.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 882.575934] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90a67997-a3a2-451f-bf27-31c46dfaaec5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.598464] env[68285]: DEBUG oslo_vmware.api [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 882.598464] env[68285]: value = "task-2891359" [ 882.598464] env[68285]: _type = "Task" [ 882.598464] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.611009] env[68285]: DEBUG oslo_vmware.api [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891359, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.807912] env[68285]: DEBUG nova.compute.manager [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 882.863145] env[68285]: INFO nova.compute.manager [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Took 41.52 seconds to build instance. [ 882.979237] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d18719e-5395-4842-a8f9-dd6523b6359a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.989854] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9afaff-74a1-4966-b764-f2014aba1558 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.024659] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46a87db-df18-46fa-a833-a3608942bb5f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.035548] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2799e214-164b-46d0-ae1f-08191722234d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.050040] env[68285]: DEBUG nova.compute.provider_tree [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 883.110677] env[68285]: DEBUG oslo_vmware.api [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891359, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.251036] env[68285]: DEBUG nova.virt.hardware [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 883.251036] env[68285]: DEBUG nova.virt.hardware [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.251036] env[68285]: DEBUG nova.virt.hardware [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 883.251036] env[68285]: DEBUG nova.virt.hardware [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.251036] env[68285]: DEBUG nova.virt.hardware [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 883.251308] env[68285]: DEBUG nova.virt.hardware [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 883.251587] env[68285]: DEBUG nova.virt.hardware [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 883.251795] env[68285]: DEBUG nova.virt.hardware [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 883.252009] env[68285]: DEBUG nova.virt.hardware [None req-291175db-d1b0-43be-a726-16a4868e44ef 
tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 883.252216] env[68285]: DEBUG nova.virt.hardware [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 883.252427] env[68285]: DEBUG nova.virt.hardware [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 883.253377] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3d2b44-6cf7-4382-909c-44d490644610 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.265455] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56524443-79c3-4cd9-a3cc-5e18e344de19 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.282402] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.288413] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 883.288794] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 883.289081] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cbcc90ec-29dc-425e-bc77-6af7dec12667 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.310194] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.310194] env[68285]: value = "task-2891360" [ 883.310194] env[68285]: _type = "Task" [ 883.310194] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.333048] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891360, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.370238] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8962ec26-3529-4301-8e90-f2ce99b138f4 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "324cc3e5-1c81-498e-b520-e9fca26013ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.683s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.430138] env[68285]: DEBUG nova.network.neutron [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Successfully created port: f2956a29-080d-46ea-92ed-5591a1c77685 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 883.457132] env[68285]: DEBUG nova.objects.instance [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lazy-loading 'flavor' on Instance uuid b0f32ce2-92fd-4290-a2f4-e5658f775f4f {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.554521] env[68285]: DEBUG nova.scheduler.client.report [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 883.613648] env[68285]: DEBUG oslo_vmware.api [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891359, 'name': ReconfigVM_Task, 'duration_secs': 0.874568} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.614020] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Reconfigured VM instance instance-00000008 to attach disk [datastore2] volume-446502c1-41d3-42eb-aded-5e5732e3748e/volume-446502c1-41d3-42eb-aded-5e5732e3748e.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 883.619536] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1127f32-3eb7-4ea8-93d2-20f8d50a3252 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.639034] env[68285]: DEBUG oslo_vmware.api [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 883.639034] env[68285]: value = "task-2891361" [ 883.639034] env[68285]: _type = "Task" [ 883.639034] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.651788] env[68285]: DEBUG oslo_vmware.api [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891361, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.827118] env[68285]: DEBUG nova.compute.manager [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 883.837566] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891360, 'name': CreateVM_Task, 'duration_secs': 0.490988} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.837800] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 883.838414] env[68285]: DEBUG oslo_concurrency.lockutils [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.838604] env[68285]: DEBUG oslo_concurrency.lockutils [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.838991] env[68285]: DEBUG oslo_concurrency.lockutils [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 883.839328] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4f0e0a1-cf41-4568-b3a5-f4039e0305d2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.847235] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 883.847235] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d33be7-d845-40c1-e586-82b426595cd7" [ 883.847235] env[68285]: _type = "Task" [ 883.847235] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.858923] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d33be7-d845-40c1-e586-82b426595cd7, 'name': SearchDatastore_Task} progress is 0%. 
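
Aside (not part of the log): almost every vCenter call in this trace follows the same shape. A *_Task method is invoked (CreateVM_Task, SearchDatastore_Task, ReconfigVM_Task, ...), the API returns a task handle such as task-2891360, and the driver polls it, logging the "progress is N%" lines until it reports "completed successfully" with a duration. A simplified standalone illustration of that poll loop; the fake task object stands in for the real API and this is not the oslo.vmware code:

    import time

    class FakeTask:
        """Stand-in for a vCenter task handle such as task-2891360."""
        def __init__(self, steps):
            self._progress = iter(steps)

        def poll(self):
            # Report 'running' with the next progress value, then 'success'.
            try:
                return "running", next(self._progress)
            except StopIteration:
                return "success", 100

    def wait_for_task(task, interval=0.1):
        """Poll a task until it completes, mirroring the progress lines above."""
        started = time.monotonic()
        while True:
            state, progress = task.poll()
            print(f"Task progress is {progress}%.")
            if state == "success":
                return time.monotonic() - started
            time.sleep(interval)

    duration = wait_for_task(FakeTask([0, 5, 89]))
    print(f"completed successfully in {duration:.3f}s")
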
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.867179] env[68285]: DEBUG nova.virt.hardware [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 883.867179] env[68285]: DEBUG nova.virt.hardware [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.869692] env[68285]: DEBUG nova.virt.hardware [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 883.869958] env[68285]: DEBUG nova.virt.hardware [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.870136] env[68285]: DEBUG nova.virt.hardware [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 883.870429] env[68285]: DEBUG nova.virt.hardware [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 883.870609] env[68285]: DEBUG nova.virt.hardware [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 883.870789] env[68285]: DEBUG nova.virt.hardware [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 883.870960] env[68285]: DEBUG nova.virt.hardware [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 883.871143] env[68285]: DEBUG nova.virt.hardware [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 883.871322] env[68285]: DEBUG nova.virt.hardware [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 883.872202] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43a0647-9939-4c77-adc6-91cc07b136b8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.883411] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33738ef-7d0c-427c-b45e-8fbdb347fe69 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.962510] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "refresh_cache-b0f32ce2-92fd-4290-a2f4-e5658f775f4f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.962693] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquired lock "refresh_cache-b0f32ce2-92fd-4290-a2f4-e5658f775f4f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.962864] env[68285]: DEBUG nova.network.neutron [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.963492] env[68285]: DEBUG nova.objects.instance [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lazy-loading 'info_cache' on Instance uuid b0f32ce2-92fd-4290-a2f4-e5658f775f4f {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 884.063048] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.265s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.063048] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.776s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.066203] env[68285]: INFO nova.compute.claims [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 884.092400] env[68285]: INFO nova.scheduler.client.report [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Deleted allocations for instance c8784827-a928-439d-abdf-d82b62a61152 [ 884.149933] env[68285]: DEBUG oslo_vmware.api [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891361, 'name': ReconfigVM_Task, 'duration_secs': 0.186367} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.150330] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580877', 'volume_id': '446502c1-41d3-42eb-aded-5e5732e3748e', 'name': 'volume-446502c1-41d3-42eb-aded-5e5732e3748e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '52fbfbe4-1807-4d6d-9139-ebe30e6bf647', 'attached_at': '', 'detached_at': '', 'volume_id': '446502c1-41d3-42eb-aded-5e5732e3748e', 'serial': '446502c1-41d3-42eb-aded-5e5732e3748e'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 884.375273] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d33be7-d845-40c1-e586-82b426595cd7, 'name': SearchDatastore_Task, 'duration_secs': 0.025169} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.375273] env[68285]: DEBUG oslo_concurrency.lockutils [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.375273] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.375273] env[68285]: DEBUG oslo_concurrency.lockutils [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.375273] env[68285]: DEBUG oslo_concurrency.lockutils [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.375273] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 884.375273] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2dc793f-76aa-4512-9213-3371c77c1b7c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.391930] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 884.392069] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 884.392804] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54fb2479-9d4d-4522-b313-d37b6bcb7234 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.428713] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 884.428713] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c0af18-71bf-7c4b-9866-710e9daa134f" [ 884.428713] env[68285]: _type = "Task" [ 884.428713] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.454338] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c0af18-71bf-7c4b-9866-710e9daa134f, 'name': SearchDatastore_Task, 'duration_secs': 0.015433} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.455302] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b20a9114-c4f2-436d-b517-374d3917f95d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.468747] env[68285]: DEBUG nova.objects.base [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 884.477401] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 884.477401] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524133d0-c72d-d6c7-5fc8-268661d88cb6" [ 884.477401] env[68285]: _type = "Task" [ 884.477401] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.497514] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524133d0-c72d-d6c7-5fc8-268661d88cb6, 'name': SearchDatastore_Task, 'duration_secs': 0.011664} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.497514] env[68285]: DEBUG oslo_concurrency.lockutils [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.497514] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 29981c10-c6dd-4852-94ad-1f8f0135b8cc/29981c10-c6dd-4852-94ad-1f8f0135b8cc.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 884.497782] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d1b57e5-aa6f-4ab9-aa63-4ec196b3a498 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.511516] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 884.511516] env[68285]: value = "task-2891362" [ 884.511516] env[68285]: _type = "Task" [ 884.511516] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.525141] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891362, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.608604] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc6f5a97-9c3d-4605-903a-78adb8974419 tempest-ServerMetadataNegativeTestJSON-539636840 tempest-ServerMetadataNegativeTestJSON-539636840-project-member] Lock "c8784827-a928-439d-abdf-d82b62a61152" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.249s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.031158] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891362, 'name': CopyVirtualDisk_Task} progress is 89%. 
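
Aside (not part of the log): the image-cache handling above is serialized with named locks. The request takes a lock on the cached VMDK path "[datastore2] devstack-image-cache_base/ce84ab4c-.../ce84ab4c-....vmdk", creates the cache folder if needed, checks the datastore for the cached disk (SearchDatastore_Task), releases the lock, and only then copies the cached disk into the instance directory. A minimal sketch of that acquire/check/release/copy shape using plain threading locks; the real code uses oslo_concurrency.lockutils and datastore paths, so everything here is illustrative:

    import threading
    from collections import defaultdict

    # One lock per named resource, similar in spirit to Nova's named locks.
    _locks = defaultdict(threading.Lock)
    _cached_images = set()

    def ensure_image_cached(image_id, datastore="datastore2"):
        """Make sure the image is in the per-datastore cache exactly once."""
        cache_path = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        with _locks[cache_path]:          # "Acquiring lock ..." / "Releasing lock ..."
            if cache_path not in _cached_images:
                print(f"downloading image into {cache_path}")
                _cached_images.add(cache_path)
            else:
                print(f"cache hit for {cache_path}")
        return cache_path

    src = ensure_image_cached("ce84ab4c-9913-42dc-b839-714ad2184867")
    print(f"Copying Virtual Disk {src} to the instance directory")
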
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.205406] env[68285]: DEBUG nova.objects.instance [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lazy-loading 'flavor' on Instance uuid 52fbfbe4-1807-4d6d-9139-ebe30e6bf647 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 885.529858] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891362, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531445} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.530740] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 29981c10-c6dd-4852-94ad-1f8f0135b8cc/29981c10-c6dd-4852-94ad-1f8f0135b8cc.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 885.530959] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 885.531259] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b976328a-8955-41aa-a2c0-5e00975e77a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.540087] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 885.540087] env[68285]: value = "task-2891363" [ 885.540087] env[68285]: _type = "Task" [ 885.540087] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.552577] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891363, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.671196] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21faca13-043f-4f93-bc46-03c3e7164417 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.683042] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14f0819-4bcb-4f59-8a1f-e75ef5cc11ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.718385] env[68285]: DEBUG nova.network.neutron [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Updating instance_info_cache with network_info: [{"id": "c3761ed0-eacf-4744-a549-4868f00f2bb5", "address": "fa:16:3e:d4:7c:0f", "network": {"id": "12f11a62-40d0-4668-a558-86bd6b08e0a8", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-2047022046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ca04723be164bd6bc8759280a25797d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3761ed0-ea", "ovs_interfaceid": "c3761ed0-eacf-4744-a549-4868f00f2bb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.720493] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ad149a8e-dafc-40ba-9fe1-8523ba129347 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.418s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.721966] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f031f545-e3f5-48cc-a9bb-8f67e0532051 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.733187] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a1bfb6-b4ab-49b8-9233-56321ea62903 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.752771] env[68285]: DEBUG nova.compute.provider_tree [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.950806] env[68285]: DEBUG nova.compute.manager [req-f4f89847-c410-4b71-b940-de37e02b226b req-c63275bf-aac4-48a6-a561-6df4bd674041 service nova] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Received event network-vif-plugged-f2956a29-080d-46ea-92ed-5591a1c77685 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 885.951072] env[68285]: DEBUG oslo_concurrency.lockutils [req-f4f89847-c410-4b71-b940-de37e02b226b req-c63275bf-aac4-48a6-a561-6df4bd674041 service nova] Acquiring lock "753bb2f7-bf0a-401e-81af-93982558d3b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.951239] env[68285]: DEBUG oslo_concurrency.lockutils [req-f4f89847-c410-4b71-b940-de37e02b226b req-c63275bf-aac4-48a6-a561-6df4bd674041 service nova] Lock "753bb2f7-bf0a-401e-81af-93982558d3b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.951403] env[68285]: DEBUG oslo_concurrency.lockutils [req-f4f89847-c410-4b71-b940-de37e02b226b req-c63275bf-aac4-48a6-a561-6df4bd674041 service nova] Lock "753bb2f7-bf0a-401e-81af-93982558d3b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.951565] env[68285]: DEBUG nova.compute.manager [req-f4f89847-c410-4b71-b940-de37e02b226b req-c63275bf-aac4-48a6-a561-6df4bd674041 service nova] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] No waiting events found dispatching network-vif-plugged-f2956a29-080d-46ea-92ed-5591a1c77685 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 885.951723] env[68285]: WARNING nova.compute.manager [req-f4f89847-c410-4b71-b940-de37e02b226b req-c63275bf-aac4-48a6-a561-6df4bd674041 service nova] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Received unexpected event network-vif-plugged-f2956a29-080d-46ea-92ed-5591a1c77685 for instance with vm_state building and task_state spawning. [ 885.967972] env[68285]: DEBUG nova.network.neutron [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Successfully updated port: f2956a29-080d-46ea-92ed-5591a1c77685 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 886.052027] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891363, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077383} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.052319] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 886.053120] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0b756e-2c79-4917-8e4e-7a946be5506f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.074728] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] 29981c10-c6dd-4852-94ad-1f8f0135b8cc/29981c10-c6dd-4852-94ad-1f8f0135b8cc.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 886.075028] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90f75356-e820-4b5d-9933-c8c9579513b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.096439] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 886.096439] env[68285]: value = "task-2891364" [ 886.096439] env[68285]: _type = "Task" [ 886.096439] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.106132] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891364, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.226101] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Releasing lock "refresh_cache-b0f32ce2-92fd-4290-a2f4-e5658f775f4f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.255753] env[68285]: DEBUG nova.scheduler.client.report [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 886.473218] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Acquiring lock "refresh_cache-753bb2f7-bf0a-401e-81af-93982558d3b7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.473341] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Acquired lock "refresh_cache-753bb2f7-bf0a-401e-81af-93982558d3b7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 886.473508] env[68285]: DEBUG nova.network.neutron [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 886.607511] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891364, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.761835] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.699s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.762298] env[68285]: DEBUG nova.compute.manager [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 886.765032] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.850s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.766573] env[68285]: INFO nova.compute.claims [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 886.792098] env[68285]: DEBUG oslo_concurrency.lockutils [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.792370] env[68285]: DEBUG oslo_concurrency.lockutils [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.002s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.010244] env[68285]: DEBUG nova.network.neutron [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.118172] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891364, 'name': ReconfigVM_Task, 'duration_secs': 0.532114} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.118172] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Reconfigured VM instance instance-00000025 to attach disk [datastore2] 29981c10-c6dd-4852-94ad-1f8f0135b8cc/29981c10-c6dd-4852-94ad-1f8f0135b8cc.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.118172] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98a4bbad-a15b-4dc7-8514-0a52c3a825ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.125973] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 887.125973] env[68285]: value = "task-2891365" [ 887.125973] env[68285]: _type = "Task" [ 887.125973] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.138759] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891365, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.197438] env[68285]: DEBUG nova.network.neutron [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Updating instance_info_cache with network_info: [{"id": "f2956a29-080d-46ea-92ed-5591a1c77685", "address": "fa:16:3e:39:4f:31", "network": {"id": "47849d8f-6243-42f5-bb98-3b99e145ba5d", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-303257761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68e85847129e4ba38c7625ad1c4efc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2956a29-08", "ovs_interfaceid": "f2956a29-080d-46ea-92ed-5591a1c77685", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.236380] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Powering on the VM 
{{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 887.236475] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-584c45cd-7c79-430f-8fbf-526fa62ee4a2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.247416] env[68285]: DEBUG oslo_vmware.api [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 887.247416] env[68285]: value = "task-2891366" [ 887.247416] env[68285]: _type = "Task" [ 887.247416] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.257636] env[68285]: DEBUG oslo_vmware.api [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891366, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.281020] env[68285]: DEBUG nova.compute.utils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 887.283959] env[68285]: DEBUG nova.compute.manager [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 887.284216] env[68285]: DEBUG nova.network.neutron [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 887.296201] env[68285]: INFO nova.compute.manager [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Detaching volume 446502c1-41d3-42eb-aded-5e5732e3748e [ 887.357028] env[68285]: DEBUG nova.policy [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '712fb26143084c72a09ca405f7f44467', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0df6f9cd11e4cbea0a5d25e546ade05', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 887.358969] env[68285]: INFO nova.virt.block_device [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Attempting to driver detach volume 446502c1-41d3-42eb-aded-5e5732e3748e from mountpoint /dev/sdb [ 887.359241] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Volume detach. 
Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 887.359446] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580877', 'volume_id': '446502c1-41d3-42eb-aded-5e5732e3748e', 'name': 'volume-446502c1-41d3-42eb-aded-5e5732e3748e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '52fbfbe4-1807-4d6d-9139-ebe30e6bf647', 'attached_at': '', 'detached_at': '', 'volume_id': '446502c1-41d3-42eb-aded-5e5732e3748e', 'serial': '446502c1-41d3-42eb-aded-5e5732e3748e'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 887.360346] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f296e81-aef0-449e-836a-74819c5383e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.391497] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bb7206-ff55-4b7e-bf3b-80f95d39f48f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.400212] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adbc8e7-4775-44ff-bc90-eb7112b9242e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.435714] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de34a044-a5a9-404e-a840-577657967d85 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.444531] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.444786] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.465728] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] The volume has not been displaced from its original location: [datastore2] volume-446502c1-41d3-42eb-aded-5e5732e3748e/volume-446502c1-41d3-42eb-aded-5e5732e3748e.vmdk. No consolidation needed. 
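
Aside (not part of the log): the attach and detach records carry the same connection_info payload, with driver_volume_type 'vmdk', the backing volume's managed object ('vm-580877'), the volume UUID and the access mode. Before detaching, the driver first confirms the VMDK has not been displaced from its original datastore location and only then reconfigures the VM to drop the disk. A small standalone sketch of inspecting such a payload; the dict contents are copied from the log, but the checks are illustrative rather than the Nova implementation:

    connection_info = {
        "driver_volume_type": "vmdk",
        "data": {
            "volume": "vm-580877",
            "volume_id": "446502c1-41d3-42eb-aded-5e5732e3748e",
            "name": "volume-446502c1-41d3-42eb-aded-5e5732e3748e",
            "access_mode": "rw",
            "encrypted": False,
        },
    }

    def vmdk_path(data, datastore="datastore2"):
        """Original location of the volume-backed VMDK, as logged above."""
        return f"[{datastore}] {data['name']}/{data['name']}.vmdk"

    def plan_detach(info):
        if info["driver_volume_type"] != "vmdk":
            raise ValueError("only vmdk-backed volumes handled in this sketch")
        data = info["data"]
        current = vmdk_path(data)      # pretend this was read back from the VM config
        original = vmdk_path(data)
        if current == original:
            print("The volume has not been displaced; no consolidation needed.")
        print(f"Reconfiguring VM to detach disk {data['volume_id']}")

    plan_detach(connection_info)
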
{{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 887.471036] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Reconfiguring VM instance instance-00000008 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 887.471706] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4d437b3-d732-4b1e-975b-df8bdad19129 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.494198] env[68285]: DEBUG oslo_vmware.api [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 887.494198] env[68285]: value = "task-2891367" [ 887.494198] env[68285]: _type = "Task" [ 887.494198] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.505508] env[68285]: DEBUG oslo_vmware.api [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891367, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.638928] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891365, 'name': Rename_Task, 'duration_secs': 0.236467} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.639317] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 887.639624] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-174ecace-4b46-44b3-9e6c-f8c459405a99 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.654229] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Acquiring lock "c7ab28c3-a316-4685-b876-a0e7c657ec35" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.654659] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Lock "c7ab28c3-a316-4685-b876-a0e7c657ec35" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.654917] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 887.654917] env[68285]: value = "task-2891368" [ 887.654917] env[68285]: _type = "Task" [ 887.654917] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.667630] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891368, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.700339] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Releasing lock "refresh_cache-753bb2f7-bf0a-401e-81af-93982558d3b7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.700728] env[68285]: DEBUG nova.compute.manager [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Instance network_info: |[{"id": "f2956a29-080d-46ea-92ed-5591a1c77685", "address": "fa:16:3e:39:4f:31", "network": {"id": "47849d8f-6243-42f5-bb98-3b99e145ba5d", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-303257761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68e85847129e4ba38c7625ad1c4efc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2956a29-08", "ovs_interfaceid": "f2956a29-080d-46ea-92ed-5591a1c77685", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 887.701213] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:4f:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68ec9c06-8680-4a41-abad-cddbd1f768c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2956a29-080d-46ea-92ed-5591a1c77685', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 887.710060] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Creating folder: Project (68e85847129e4ba38c7625ad1c4efc82). Parent ref: group-v580775. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 887.710472] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13b069c9-a186-4ee8-973f-fb6d3f464553 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.723696] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Created folder: Project (68e85847129e4ba38c7625ad1c4efc82) in parent group-v580775. [ 887.723951] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Creating folder: Instances. Parent ref: group-v580879. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 887.724215] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47e9dc1a-d320-4a27-8ca2-c1780ae5b45f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.736336] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Created folder: Instances in parent group-v580879. [ 887.737027] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 887.737027] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 887.739920] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2cdbc569-2860-46bc-8eba-a68d91117836 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.766743] env[68285]: DEBUG oslo_vmware.api [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891366, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.768221] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 887.768221] env[68285]: value = "task-2891371" [ 887.768221] env[68285]: _type = "Task" [ 887.768221] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.777464] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891371, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.784581] env[68285]: DEBUG nova.compute.manager [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 887.949421] env[68285]: DEBUG nova.compute.manager [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 887.997087] env[68285]: DEBUG nova.compute.manager [req-a0b60d1a-6f41-4263-83d9-906febc3e47f req-f04af892-00bb-41b3-a4e3-909fac3d8688 service nova] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Received event network-changed-f2956a29-080d-46ea-92ed-5591a1c77685 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 887.997434] env[68285]: DEBUG nova.compute.manager [req-a0b60d1a-6f41-4263-83d9-906febc3e47f req-f04af892-00bb-41b3-a4e3-909fac3d8688 service nova] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Refreshing instance network info cache due to event network-changed-f2956a29-080d-46ea-92ed-5591a1c77685. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 887.997683] env[68285]: DEBUG oslo_concurrency.lockutils [req-a0b60d1a-6f41-4263-83d9-906febc3e47f req-f04af892-00bb-41b3-a4e3-909fac3d8688 service nova] Acquiring lock "refresh_cache-753bb2f7-bf0a-401e-81af-93982558d3b7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.997833] env[68285]: DEBUG oslo_concurrency.lockutils [req-a0b60d1a-6f41-4263-83d9-906febc3e47f req-f04af892-00bb-41b3-a4e3-909fac3d8688 service nova] Acquired lock "refresh_cache-753bb2f7-bf0a-401e-81af-93982558d3b7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.998072] env[68285]: DEBUG nova.network.neutron [req-a0b60d1a-6f41-4263-83d9-906febc3e47f req-f04af892-00bb-41b3-a4e3-909fac3d8688 service nova] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Refreshing network info cache for port f2956a29-080d-46ea-92ed-5591a1c77685 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 888.014846] env[68285]: DEBUG oslo_vmware.api [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891367, 'name': ReconfigVM_Task, 'duration_secs': 0.425788} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.018746] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Reconfigured VM instance instance-00000008 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 888.026948] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67b6c4e0-94fe-46e2-afa9-662959be50f8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.042279] env[68285]: DEBUG nova.network.neutron [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Successfully created port: f41109eb-9884-4723-8695-fdaae26703db {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 888.053067] env[68285]: DEBUG oslo_vmware.api [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 888.053067] env[68285]: value = "task-2891372" [ 888.053067] env[68285]: _type = "Task" [ 888.053067] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.074481] env[68285]: DEBUG oslo_vmware.api [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891372, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.167414] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891368, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.219710] env[68285]: DEBUG nova.compute.manager [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 888.220627] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9bc2c7-d961-4eb4-b5e8-15be6ab81863 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.266322] env[68285]: DEBUG oslo_vmware.api [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891366, 'name': PowerOnVM_Task, 'duration_secs': 0.695018} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.266660] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 888.266864] env[68285]: DEBUG nova.compute.manager [None req-31ce4e87-0c2a-4e5d-9e0c-509cde4ea9dd tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 888.267853] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba8268a-2c3d-4f05-852f-15f9a669b424 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.286569] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891371, 'name': CreateVM_Task, 'duration_secs': 0.492076} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.286978] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 888.287729] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.290944] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 888.290944] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 888.290944] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c97151e-7784-443f-9756-a4a49f4cfa63 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.300076] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Waiting for the task: (returnval){ [ 888.300076] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5235afa7-a91c-8c43-79c8-d2319436d506" [ 888.300076] env[68285]: _type = "Task" [ 888.300076] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.311601] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5235afa7-a91c-8c43-79c8-d2319436d506, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.441360] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb29b98-d2d2-4846-b97b-dc2f8d2d26f1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.448073] env[68285]: DEBUG oslo_concurrency.lockutils [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Acquiring lock "5b58896c-cb07-48c8-ace0-385486a3e19d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.448331] env[68285]: DEBUG oslo_concurrency.lockutils [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Lock "5b58896c-cb07-48c8-ace0-385486a3e19d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.452681] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73ab79d-2bb3-4acd-b1c3-cca2f332871f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.491270] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.492279] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e53713-9d3a-41aa-bd42-edba1257c62a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.501053] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4922d07e-d43b-40ba-9963-05ef1325ecef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.518345] env[68285]: DEBUG nova.compute.provider_tree [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.564474] env[68285]: DEBUG oslo_vmware.api [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] 
Task: {'id': task-2891372, 'name': ReconfigVM_Task, 'duration_secs': 0.184844} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.564908] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580877', 'volume_id': '446502c1-41d3-42eb-aded-5e5732e3748e', 'name': 'volume-446502c1-41d3-42eb-aded-5e5732e3748e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '52fbfbe4-1807-4d6d-9139-ebe30e6bf647', 'attached_at': '', 'detached_at': '', 'volume_id': '446502c1-41d3-42eb-aded-5e5732e3748e', 'serial': '446502c1-41d3-42eb-aded-5e5732e3748e'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 888.666964] env[68285]: DEBUG oslo_vmware.api [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891368, 'name': PowerOnVM_Task, 'duration_secs': 0.788736} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.667245] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 888.667507] env[68285]: DEBUG nova.compute.manager [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 888.668271] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503d10bd-7dad-4b0d-914c-d2d575b901c7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.725119] env[68285]: DEBUG nova.network.neutron [req-a0b60d1a-6f41-4263-83d9-906febc3e47f req-f04af892-00bb-41b3-a4e3-909fac3d8688 service nova] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Updated VIF entry in instance network info cache for port f2956a29-080d-46ea-92ed-5591a1c77685. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 888.725568] env[68285]: DEBUG nova.network.neutron [req-a0b60d1a-6f41-4263-83d9-906febc3e47f req-f04af892-00bb-41b3-a4e3-909fac3d8688 service nova] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Updating instance_info_cache with network_info: [{"id": "f2956a29-080d-46ea-92ed-5591a1c77685", "address": "fa:16:3e:39:4f:31", "network": {"id": "47849d8f-6243-42f5-bb98-3b99e145ba5d", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-303257761-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68e85847129e4ba38c7625ad1c4efc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2956a29-08", "ovs_interfaceid": "f2956a29-080d-46ea-92ed-5591a1c77685", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.734131] env[68285]: INFO nova.compute.manager [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] instance snapshotting [ 888.736952] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f2c71f-9388-4544-bfd4-ab832e907da2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.756562] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85c59b4-437a-476d-9611-0e5e8c39c799 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.805120] env[68285]: DEBUG nova.compute.manager [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 888.812778] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5235afa7-a91c-8c43-79c8-d2319436d506, 'name': SearchDatastore_Task, 'duration_secs': 0.029462} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.813317] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.813317] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 888.813538] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.813683] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 888.813861] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 888.814149] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e96dd6a0-7ba7-4bd9-98dd-0390c14a7103 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.831053] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 888.831262] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 888.834043] env[68285]: DEBUG nova.virt.hardware [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 888.834317] env[68285]: DEBUG nova.virt.hardware [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.834542] env[68285]: DEBUG nova.virt.hardware [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 888.834796] env[68285]: DEBUG nova.virt.hardware [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.834995] env[68285]: DEBUG nova.virt.hardware [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 888.835221] env[68285]: DEBUG nova.virt.hardware [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 888.835474] env[68285]: DEBUG nova.virt.hardware [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 888.835685] env[68285]: DEBUG nova.virt.hardware [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 888.835924] env[68285]: DEBUG nova.virt.hardware [None 
req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 888.836158] env[68285]: DEBUG nova.virt.hardware [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 888.836375] env[68285]: DEBUG nova.virt.hardware [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 888.836682] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62f548a7-6232-4fe6-abd1-392c30160b3d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.839693] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39dabda-0cf2-4b58-8ba0-826aaf2d2c55 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.851270] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad37ff9b-f3ee-4f28-8ed0-8d5ca10d8893 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.855226] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Waiting for the task: (returnval){ [ 888.855226] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]520d91ed-535c-e5fc-840e-06cc6312066b" [ 888.855226] env[68285]: _type = "Task" [ 888.855226] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.871382] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520d91ed-535c-e5fc-840e-06cc6312066b, 'name': SearchDatastore_Task, 'duration_secs': 0.014173} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.872136] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c0c0ba8-5591-4608-968f-fdbddfa9dd73 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.877411] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Waiting for the task: (returnval){ [ 888.877411] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ee8067-160b-9020-f19b-32728121d917" [ 888.877411] env[68285]: _type = "Task" [ 888.877411] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.885282] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ee8067-160b-9020-f19b-32728121d917, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.953163] env[68285]: DEBUG nova.compute.utils [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 889.022779] env[68285]: DEBUG nova.scheduler.client.report [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 889.126799] env[68285]: DEBUG nova.objects.instance [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lazy-loading 'flavor' on Instance uuid 52fbfbe4-1807-4d6d-9139-ebe30e6bf647 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.184309] env[68285]: DEBUG oslo_concurrency.lockutils [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.228722] env[68285]: DEBUG oslo_concurrency.lockutils [req-a0b60d1a-6f41-4263-83d9-906febc3e47f req-f04af892-00bb-41b3-a4e3-909fac3d8688 service nova] Releasing lock "refresh_cache-753bb2f7-bf0a-401e-81af-93982558d3b7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.267232] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 889.267609] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6d6de40d-33ec-4eef-b28c-f8369cd0117b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.277240] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 
tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 889.277240] env[68285]: value = "task-2891373" [ 889.277240] env[68285]: _type = "Task" [ 889.277240] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.286983] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891373, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.388869] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ee8067-160b-9020-f19b-32728121d917, 'name': SearchDatastore_Task, 'duration_secs': 0.015359} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.388869] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.389165] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 753bb2f7-bf0a-401e-81af-93982558d3b7/753bb2f7-bf0a-401e-81af-93982558d3b7.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 889.389458] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55516ede-f24b-4c86-99e6-66fdd13c9a63 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.396544] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Waiting for the task: (returnval){ [ 889.396544] env[68285]: value = "task-2891374" [ 889.396544] env[68285]: _type = "Task" [ 889.396544] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.404540] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891374, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.456341] env[68285]: DEBUG oslo_concurrency.lockutils [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Lock "5b58896c-cb07-48c8-ace0-385486a3e19d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.528159] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.763s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.528716] env[68285]: DEBUG nova.compute.manager [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 889.531407] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 34.260s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.782999] env[68285]: DEBUG nova.network.neutron [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Successfully updated port: f41109eb-9884-4723-8695-fdaae26703db {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 889.791495] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891373, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.910707] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891374, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.983832] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquiring lock "29981c10-c6dd-4852-94ad-1f8f0135b8cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.983832] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Lock "29981c10-c6dd-4852-94ad-1f8f0135b8cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.983832] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquiring lock "29981c10-c6dd-4852-94ad-1f8f0135b8cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.983832] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Lock "29981c10-c6dd-4852-94ad-1f8f0135b8cc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.983832] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Lock "29981c10-c6dd-4852-94ad-1f8f0135b8cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.985817] env[68285]: INFO nova.compute.manager [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Terminating instance [ 890.036107] env[68285]: DEBUG nova.compute.utils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 890.054134] env[68285]: DEBUG nova.compute.manager [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 890.056519] env[68285]: DEBUG nova.compute.manager [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 890.056693] env[68285]: DEBUG nova.network.neutron [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 890.131815] env[68285]: DEBUG nova.policy [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc9efb325ae246a0955c6e87881c2c53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd0cb899a56c409b8210dfc378cd6908', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 890.134051] env[68285]: DEBUG oslo_concurrency.lockutils [None req-92ed92ce-4979-42fa-9e8d-69a38d2888b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.342s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.291035] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.291263] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquired lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.291351] env[68285]: DEBUG nova.network.neutron [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.292827] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891373, 'name': CreateSnapshot_Task, 'duration_secs': 0.653205} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.293497] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 890.294756] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0622ff5e-89ef-4044-b2dc-cffec5541900 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.408782] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891374, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.589962} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.409043] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 753bb2f7-bf0a-401e-81af-93982558d3b7/753bb2f7-bf0a-401e-81af-93982558d3b7.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 890.409258] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 890.409520] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd187b59-543f-4ba7-85bb-181a7a7537a5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.420025] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Waiting for the task: (returnval){ [ 890.420025] env[68285]: value = "task-2891375" [ 890.420025] env[68285]: _type = "Task" [ 890.420025] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.421083] env[68285]: DEBUG nova.compute.manager [req-b8c05a29-6579-406b-b354-1978fabd9502 req-114982fa-9b2c-4985-bf81-a47ec3bf583d service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Received event network-vif-plugged-f41109eb-9884-4723-8695-fdaae26703db {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 890.421360] env[68285]: DEBUG oslo_concurrency.lockutils [req-b8c05a29-6579-406b-b354-1978fabd9502 req-114982fa-9b2c-4985-bf81-a47ec3bf583d service nova] Acquiring lock "3e656d8d-bd06-4886-9424-4ed76b98aae9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.421506] env[68285]: DEBUG oslo_concurrency.lockutils [req-b8c05a29-6579-406b-b354-1978fabd9502 req-114982fa-9b2c-4985-bf81-a47ec3bf583d service nova] Lock "3e656d8d-bd06-4886-9424-4ed76b98aae9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.421627] env[68285]: DEBUG oslo_concurrency.lockutils [req-b8c05a29-6579-406b-b354-1978fabd9502 req-114982fa-9b2c-4985-bf81-a47ec3bf583d service nova] Lock "3e656d8d-bd06-4886-9424-4ed76b98aae9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.421828] env[68285]: DEBUG nova.compute.manager [req-b8c05a29-6579-406b-b354-1978fabd9502 req-114982fa-9b2c-4985-bf81-a47ec3bf583d service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] No waiting events found dispatching network-vif-plugged-f41109eb-9884-4723-8695-fdaae26703db {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 890.421963] env[68285]: WARNING nova.compute.manager [req-b8c05a29-6579-406b-b354-1978fabd9502 req-114982fa-9b2c-4985-bf81-a47ec3bf583d service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Received unexpected event network-vif-plugged-f41109eb-9884-4723-8695-fdaae26703db for instance with vm_state building and task_state spawning. [ 890.422174] env[68285]: DEBUG nova.compute.manager [req-b8c05a29-6579-406b-b354-1978fabd9502 req-114982fa-9b2c-4985-bf81-a47ec3bf583d service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Received event network-changed-f41109eb-9884-4723-8695-fdaae26703db {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 890.422355] env[68285]: DEBUG nova.compute.manager [req-b8c05a29-6579-406b-b354-1978fabd9502 req-114982fa-9b2c-4985-bf81-a47ec3bf583d service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Refreshing instance network info cache due to event network-changed-f41109eb-9884-4723-8695-fdaae26703db. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 890.422520] env[68285]: DEBUG oslo_concurrency.lockutils [req-b8c05a29-6579-406b-b354-1978fabd9502 req-114982fa-9b2c-4985-bf81-a47ec3bf583d service nova] Acquiring lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.434397] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891375, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.490288] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquiring lock "refresh_cache-29981c10-c6dd-4852-94ad-1f8f0135b8cc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.490443] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquired lock "refresh_cache-29981c10-c6dd-4852-94ad-1f8f0135b8cc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.490637] env[68285]: DEBUG nova.network.neutron [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.532499] env[68285]: DEBUG oslo_concurrency.lockutils [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Acquiring lock "5b58896c-cb07-48c8-ace0-385486a3e19d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.532761] env[68285]: DEBUG oslo_concurrency.lockutils [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Lock "5b58896c-cb07-48c8-ace0-385486a3e19d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.532995] env[68285]: INFO nova.compute.manager [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Attaching volume c8b8da28-c2a3-4a88-b6c4-807c92e3bf87 to /dev/sdb [ 890.566967] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a8bed3-53ae-4d93-952a-3d041bc4dd14 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.576112] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a237de34-1bfe-4d29-bdf6-d954f92a7bb1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.595660] env[68285]: DEBUG nova.virt.block_device [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Updating existing volume attachment record: 8f650232-2cf2-41cf-9b29-1d496f5d8da5 {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 890.601489] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance e28d0927-17c2-4256-93d4-ef0cc2c9b92a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.601489] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 52fbfbe4-1807-4d6d-9139-ebe30e6bf647 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.601489] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance fe9a8a13-73ec-4556-a62c-cc49fd01f539 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 890.601489] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 11de7da5-1d73-4536-b2a1-f7dbbdec14b8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 890.601489] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance ec89a2a4-3bfc-45c5-b7f2-239b52995d6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.601489] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 60144efd-061e-4144-9541-b2321c9b0ec1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.601489] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 5b58896c-cb07-48c8-ace0-385486a3e19d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.601489] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 12fad42a-1011-4563-b11f-7b141b2a1670 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.601489] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 87582063-50f9-4518-ad2d-915c9cd49b19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.601489] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d0f6ab86-e18d-42ac-bcf3-94eafb1939ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.601489] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance e3b01f87-6a4c-4127-9204-2bfa5ff28f38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.601802] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance ee45231a-80f2-49b9-8bc7-03a0c920a668 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 890.601802] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 0d13cc84-bbf2-4e8b-8344-d69acac6bd35 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 890.601802] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance a97df3d2-c182-46d8-95c2-61caccade285 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.601886] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 5e101d74-7a82-4118-8f4c-7af9a6b0917a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.602029] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 1c42043d-f8db-4cb9-8147-48d0d32c982b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 890.602127] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 34aeba05-804e-444c-8e58-69c7721b10b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.602233] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance a2a7590d-c415-4955-8a25-4b1411449557 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.602343] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 1b9dd0e2-781f-43d7-a66e-e718a0972c78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.602459] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance b0f32ce2-92fd-4290-a2f4-e5658f775f4f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.602560] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 437a18da-8fe4-478e-82a0-3b1a9da47df8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.602664] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance bda5b2fb-1875-4078-a4c1-f76f6abeaaf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.602784] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d2c3e3eb-4b05-4e08-bd08-0f42560fcdba is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
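
The resource-tracker entries above repeat a fixed decision pattern for each placement allocation held against this compute node: instances the host actively manages are logged at DEBUG and left alone, allocations for instances this host does not manage produce the "Skipping heal of allocation because we do not know what to do" WARNING, and (as seen later in the log) allocations for instances that are scheduled but not yet started are also skipped. The sketch below is a simplified, hypothetical reconstruction of that pattern for illustration only; it is not the actual `_remove_deleted_instances_allocations` code in nova/compute/resource_tracker.py, and the `Allocation` container and its flags are assumptions made for the example.

```python
import logging
from dataclasses import dataclass

LOG = logging.getLogger(__name__)


@dataclass
class Allocation:
    """Illustrative stand-in for one placement allocation against this node."""
    instance_uuid: str
    resources: dict              # e.g. {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}
    tracked_locally: bool        # instance appears in this host's tracked set
    is_deleted: bool             # instance record is marked deleted
    scheduled_not_started: bool  # scheduler made the allocation, spawn not begun


def heal_allocations(allocations):
    """Reproduce the DEBUG/WARNING pattern seen in the log above (sketch only)."""
    to_remove = []
    for alloc in allocations:
        if alloc.scheduled_not_started:
            LOG.debug("Instance %s has been scheduled to this compute host, "
                      "skipping heal of allocation: %s",
                      alloc.instance_uuid, alloc.resources)
        elif alloc.tracked_locally and not alloc.is_deleted:
            LOG.debug("Instance %s actively managed on this compute host and "
                      "has allocations in placement: %s",
                      alloc.instance_uuid, alloc.resources)
        elif alloc.is_deleted:
            # Only deleted-but-still-allocated instances are safe to clean up.
            to_remove.append(alloc.instance_uuid)
        else:
            LOG.warning("Instance %s is not being actively managed by this "
                        "compute host but has allocations referencing this "
                        "compute host: %s. Skipping heal of allocation "
                        "because we do not know what to do.",
                        alloc.instance_uuid, alloc.resources)
    return to_remove
```

Read this way, the WARNING lines in the log indicate allocations whose instances are unknown to this host (for example, left behind by earlier test runs); the tracker deliberately leaves them untouched rather than guessing.
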
[ 890.602894] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 29981c10-c6dd-4852-94ad-1f8f0135b8cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.603016] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 324cc3e5-1c81-498e-b520-e9fca26013ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.604303] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 753bb2f7-bf0a-401e-81af-93982558d3b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.604303] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 3e656d8d-bd06-4886-9424-4ed76b98aae9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.604303] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 631fe0ee-73a6-48c5-9a14-f6a00d2c2942 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.701885] env[68285]: DEBUG nova.network.neutron [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Successfully created port: 0a48ba66-2ec4-4dfe-94f1-ea0a9f494005 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 890.817117] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 890.818199] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c5db5c1b-e6bc-4ab3-aa32-036fc517cf2f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.832669] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 890.832669] env[68285]: value = "task-2891378" [ 890.832669] env[68285]: _type = "Task" [ 890.832669] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.845918] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891378, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.860846] env[68285]: DEBUG nova.network.neutron [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.936319] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891375, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085705} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.936794] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 890.938244] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b072227d-2ac1-4c8c-ba91-d56f0e4f48af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.982184] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] 753bb2f7-bf0a-401e-81af-93982558d3b7/753bb2f7-bf0a-401e-81af-93982558d3b7.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 890.987078] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f7ba7fb-92c4-4da6-a90f-51f68e3cd668 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.020414] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Waiting for the task: (returnval){ [ 891.020414] env[68285]: value = "task-2891380" [ 891.020414] env[68285]: _type = "Task" [ 891.020414] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.032103] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891380, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.033192] env[68285]: DEBUG nova.network.neutron [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 891.071458] env[68285]: DEBUG nova.compute.manager [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 891.101245] env[68285]: DEBUG nova.virt.hardware [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 891.101589] env[68285]: DEBUG nova.virt.hardware [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 891.101764] env[68285]: DEBUG nova.virt.hardware [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 891.101949] env[68285]: DEBUG nova.virt.hardware [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 891.102110] env[68285]: DEBUG nova.virt.hardware [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 891.102258] env[68285]: DEBUG nova.virt.hardware [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 891.102469] env[68285]: DEBUG nova.virt.hardware [None 
req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 891.102622] env[68285]: DEBUG nova.virt.hardware [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 891.102784] env[68285]: DEBUG nova.virt.hardware [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 891.102944] env[68285]: DEBUG nova.virt.hardware [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 891.105623] env[68285]: DEBUG nova.virt.hardware [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 891.106657] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81650961-615f-46d1-8064-e61cfb31de18 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.115389] env[68285]: DEBUG nova.network.neutron [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.120019] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 940e0328-970d-4f49-a102-d8a00b8c299b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.127224] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730a146c-7642-4102-ac36-fab282eccc10 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.329175] env[68285]: DEBUG nova.network.neutron [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Updating instance_info_cache with network_info: [{"id": "f41109eb-9884-4723-8695-fdaae26703db", "address": "fa:16:3e:d2:d7:99", "network": {"id": "24216eb9-67ca-4587-9dce-0239c567b87e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-463162314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0df6f9cd11e4cbea0a5d25e546ade05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf41109eb-98", "ovs_interfaceid": "f41109eb-9884-4723-8695-fdaae26703db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.347457] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891378, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.533162] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891380, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.580954] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.581228] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.621593] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Releasing lock "refresh_cache-29981c10-c6dd-4852-94ad-1f8f0135b8cc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.622021] env[68285]: DEBUG nova.compute.manager [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 891.622214] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 891.622863] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance b3b7f551-81aa-4ac4-9906-020fac5f01f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.625222] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff395534-7224-4c29-b7f2-60a2255f6ccc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.635431] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 891.636052] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cdca494d-d506-431e-9209-b6adebc4abeb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.645348] env[68285]: DEBUG oslo_vmware.api [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 891.645348] env[68285]: value = "task-2891381" [ 891.645348] env[68285]: _type = "Task" [ 891.645348] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.655181] env[68285]: DEBUG oslo_vmware.api [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891381, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.831723] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Releasing lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.832145] env[68285]: DEBUG nova.compute.manager [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Instance network_info: |[{"id": "f41109eb-9884-4723-8695-fdaae26703db", "address": "fa:16:3e:d2:d7:99", "network": {"id": "24216eb9-67ca-4587-9dce-0239c567b87e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-463162314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0df6f9cd11e4cbea0a5d25e546ade05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf41109eb-98", "ovs_interfaceid": "f41109eb-9884-4723-8695-fdaae26703db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 891.832611] env[68285]: DEBUG oslo_concurrency.lockutils [req-b8c05a29-6579-406b-b354-1978fabd9502 req-114982fa-9b2c-4985-bf81-a47ec3bf583d service nova] Acquired lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.832757] env[68285]: DEBUG nova.network.neutron [req-b8c05a29-6579-406b-b354-1978fabd9502 req-114982fa-9b2c-4985-bf81-a47ec3bf583d service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Refreshing network info cache for port f41109eb-9884-4723-8695-fdaae26703db {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.834463] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:d7:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '12d8eedb-97cb-4d3b-b364-42d7fd8b3c85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f41109eb-9884-4723-8695-fdaae26703db', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 891.847022] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 891.848204] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 891.851010] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa0e32cf-a15c-4e80-9de0-aaca3db0b97b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.881216] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891378, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.882126] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 891.882126] env[68285]: value = "task-2891382" [ 891.882126] env[68285]: _type = "Task" [ 891.882126] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.890209] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891382, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.031305] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891380, 'name': ReconfigVM_Task, 'duration_secs': 0.84113} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.031623] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Reconfigured VM instance instance-00000027 to attach disk [datastore2] 753bb2f7-bf0a-401e-81af-93982558d3b7/753bb2f7-bf0a-401e-81af-93982558d3b7.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 892.032357] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21f319a8-f01c-4394-a056-aaa8cbc4681d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.039184] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Waiting for the task: (returnval){ [ 892.039184] env[68285]: value = "task-2891383" [ 892.039184] env[68285]: _type = "Task" [ 892.039184] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.048121] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891383, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.125638] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 5266817c-ce3b-4c96-a3bd-32b631c29b81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.155583] env[68285]: DEBUG oslo_vmware.api [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891381, 'name': PowerOffVM_Task, 'duration_secs': 0.439678} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.155858] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 892.156036] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 892.156289] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9317e09-a6bf-4ee7-85e2-09f5365e1d79 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.178975] env[68285]: DEBUG nova.network.neutron [req-b8c05a29-6579-406b-b354-1978fabd9502 req-114982fa-9b2c-4985-bf81-a47ec3bf583d service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Updated VIF entry in instance network info cache for port f41109eb-9884-4723-8695-fdaae26703db. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 892.179473] env[68285]: DEBUG nova.network.neutron [req-b8c05a29-6579-406b-b354-1978fabd9502 req-114982fa-9b2c-4985-bf81-a47ec3bf583d service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Updating instance_info_cache with network_info: [{"id": "f41109eb-9884-4723-8695-fdaae26703db", "address": "fa:16:3e:d2:d7:99", "network": {"id": "24216eb9-67ca-4587-9dce-0239c567b87e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-463162314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0df6f9cd11e4cbea0a5d25e546ade05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf41109eb-98", "ovs_interfaceid": "f41109eb-9884-4723-8695-fdaae26703db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.185337] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 892.185567] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 
29981c10-c6dd-4852-94ad-1f8f0135b8cc] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 892.185720] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Deleting the datastore file [datastore2] 29981c10-c6dd-4852-94ad-1f8f0135b8cc {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 892.185978] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d7bace3-e2b2-4b60-8e49-e76f65b5b2d6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.192250] env[68285]: DEBUG oslo_vmware.api [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for the task: (returnval){ [ 892.192250] env[68285]: value = "task-2891385" [ 892.192250] env[68285]: _type = "Task" [ 892.192250] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.199934] env[68285]: DEBUG oslo_vmware.api [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891385, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.361369] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891378, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.391750] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891382, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.559025] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891383, 'name': Rename_Task, 'duration_secs': 0.423864} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.559025] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 892.559025] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd1e5768-8f86-4dd5-8b5f-23db9280214c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.565203] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Waiting for the task: (returnval){ [ 892.565203] env[68285]: value = "task-2891386" [ 892.565203] env[68285]: _type = "Task" [ 892.565203] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.574300] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891386, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.628796] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d1b5abfa-fd38-4d17-b75f-5036af841d24 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.682983] env[68285]: DEBUG oslo_concurrency.lockutils [req-b8c05a29-6579-406b-b354-1978fabd9502 req-114982fa-9b2c-4985-bf81-a47ec3bf583d service nova] Releasing lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.706034] env[68285]: DEBUG oslo_vmware.api [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Task: {'id': task-2891385, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.116361} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.706034] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.706034] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 892.706034] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 892.706034] env[68285]: INFO nova.compute.manager [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Took 1.08 seconds to destroy the instance on the hypervisor. [ 892.706034] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 892.706034] env[68285]: DEBUG nova.compute.manager [-] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 892.706034] env[68285]: DEBUG nova.network.neutron [-] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 892.753276] env[68285]: DEBUG nova.network.neutron [-] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 892.859843] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891378, 'name': CloneVM_Task, 'duration_secs': 1.999454} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.860119] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Created linked-clone VM from snapshot [ 892.860852] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161f96e6-b178-45cd-a49b-a714aeb968e0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.868744] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Uploading image 1ccf110b-dc59-470f-846b-3e2f555b3297 {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 892.890205] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 892.890470] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b0a8b818-a8d0-4637-b65f-6ea4e42c55f2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.896798] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891382, 'name': CreateVM_Task, 'duration_secs': 0.69556} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.898282] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 892.898619] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 892.898619] env[68285]: value = "task-2891387" [ 892.898619] env[68285]: _type = "Task" [ 892.898619] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.899220] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.899412] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.899755] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 892.900051] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57a13fd8-ffd0-4a21-9cfd-212243e2df74 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.911359] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891387, 'name': Destroy_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.911683] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 892.911683] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527c921a-944f-aaac-32a2-da1921180e8c" [ 892.911683] env[68285]: _type = "Task" [ 892.911683] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.923200] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527c921a-944f-aaac-32a2-da1921180e8c, 'name': SearchDatastore_Task, 'duration_secs': 0.011869} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.923505] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.923750] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 892.923993] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.924163] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.924343] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.924598] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6aa52e3d-c77d-4bb0-9e09-5fff9102c925 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.932355] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.932526] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 892.933330] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-373a8918-0f27-4cf5-97ba-db939565f1a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.939141] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 892.939141] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b63a6a-edf0-be1a-27c4-1a97bbf9bcb1" [ 892.939141] env[68285]: _type = "Task" [ 892.939141] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.947109] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b63a6a-edf0-be1a-27c4-1a97bbf9bcb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.028238] env[68285]: DEBUG nova.network.neutron [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Successfully updated port: 0a48ba66-2ec4-4dfe-94f1-ea0a9f494005 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 893.076051] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891386, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.134267] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 893.193962] env[68285]: DEBUG nova.compute.manager [req-6e23f9a9-c93f-4add-b30a-7231743eac4b req-5dfc9e0d-3918-43b5-99c2-83bef2e8c582 service nova] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Received event network-vif-plugged-0a48ba66-2ec4-4dfe-94f1-ea0a9f494005 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 893.194115] env[68285]: DEBUG oslo_concurrency.lockutils [req-6e23f9a9-c93f-4add-b30a-7231743eac4b req-5dfc9e0d-3918-43b5-99c2-83bef2e8c582 service nova] Acquiring lock "631fe0ee-73a6-48c5-9a14-f6a00d2c2942-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.194939] env[68285]: DEBUG oslo_concurrency.lockutils [req-6e23f9a9-c93f-4add-b30a-7231743eac4b req-5dfc9e0d-3918-43b5-99c2-83bef2e8c582 service nova] Lock "631fe0ee-73a6-48c5-9a14-f6a00d2c2942-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.195163] env[68285]: DEBUG oslo_concurrency.lockutils [req-6e23f9a9-c93f-4add-b30a-7231743eac4b req-5dfc9e0d-3918-43b5-99c2-83bef2e8c582 service nova] Lock "631fe0ee-73a6-48c5-9a14-f6a00d2c2942-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.195524] env[68285]: DEBUG nova.compute.manager [req-6e23f9a9-c93f-4add-b30a-7231743eac4b req-5dfc9e0d-3918-43b5-99c2-83bef2e8c582 service nova] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] No waiting events found dispatching network-vif-plugged-0a48ba66-2ec4-4dfe-94f1-ea0a9f494005 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 893.195644] env[68285]: WARNING nova.compute.manager [req-6e23f9a9-c93f-4add-b30a-7231743eac4b req-5dfc9e0d-3918-43b5-99c2-83bef2e8c582 service nova] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Received unexpected event network-vif-plugged-0a48ba66-2ec4-4dfe-94f1-ea0a9f494005 for instance with vm_state building and task_state spawning. [ 893.256857] env[68285]: DEBUG nova.network.neutron [-] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.410680] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891387, 'name': Destroy_Task, 'duration_secs': 0.335129} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.410962] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Destroyed the VM [ 893.411170] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 893.411429] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-567d65ce-414d-4357-8792-45777e3e14ea {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.418321] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 893.418321] env[68285]: value = "task-2891389" [ 893.418321] env[68285]: _type = "Task" [ 893.418321] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.425840] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891389, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.448847] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b63a6a-edf0-be1a-27c4-1a97bbf9bcb1, 'name': SearchDatastore_Task, 'duration_secs': 0.010228} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.450033] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce822c1f-f7cf-486d-8984-72e24cfc8679 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.455164] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 893.455164] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5252e335-1f00-a727-32dc-a16f885600a7" [ 893.455164] env[68285]: _type = "Task" [ 893.455164] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.462633] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5252e335-1f00-a727-32dc-a16f885600a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.530938] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Acquiring lock "refresh_cache-631fe0ee-73a6-48c5-9a14-f6a00d2c2942" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.531080] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Acquired lock "refresh_cache-631fe0ee-73a6-48c5-9a14-f6a00d2c2942" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.531236] env[68285]: DEBUG nova.network.neutron [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 893.577339] env[68285]: DEBUG oslo_vmware.api [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891386, 'name': PowerOnVM_Task, 'duration_secs': 0.652099} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.577654] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 893.577852] env[68285]: INFO nova.compute.manager [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Took 9.75 seconds to spawn the instance on the hypervisor. [ 893.578038] env[68285]: DEBUG nova.compute.manager [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 893.578791] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b4cf9e-5cf9-4919-85cd-a1222342c2b4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.640557] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 2a1cc678-2bb2-403e-b6e8-afdeb8362eac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 893.760941] env[68285]: INFO nova.compute.manager [-] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Took 1.06 seconds to deallocate network for instance. [ 893.930257] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891389, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.966234] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5252e335-1f00-a727-32dc-a16f885600a7, 'name': SearchDatastore_Task, 'duration_secs': 0.009375} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.966502] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.966932] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 3e656d8d-bd06-4886-9424-4ed76b98aae9/3e656d8d-bd06-4886-9424-4ed76b98aae9.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 893.967028] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad89070f-773f-4a08-a830-6ce4144bdd98 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.973703] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 893.973703] env[68285]: value = "task-2891390" [ 893.973703] env[68285]: _type = "Task" [ 893.973703] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.982557] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891390, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.083747] env[68285]: DEBUG nova.network.neutron [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 894.095634] env[68285]: INFO nova.compute.manager [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Took 48.33 seconds to build instance. [ 894.111937] env[68285]: DEBUG oslo_concurrency.lockutils [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "bda5b2fb-1875-4078-a4c1-f76f6abeaaf5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.111937] env[68285]: DEBUG oslo_concurrency.lockutils [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "bda5b2fb-1875-4078-a4c1-f76f6abeaaf5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.112144] env[68285]: DEBUG oslo_concurrency.lockutils [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "bda5b2fb-1875-4078-a4c1-f76f6abeaaf5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.112327] env[68285]: DEBUG oslo_concurrency.lockutils [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "bda5b2fb-1875-4078-a4c1-f76f6abeaaf5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.112842] env[68285]: DEBUG oslo_concurrency.lockutils [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "bda5b2fb-1875-4078-a4c1-f76f6abeaaf5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.114519] env[68285]: INFO nova.compute.manager [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Terminating instance [ 894.143362] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d4f20336-9c29-4aac-8c0d-f577749cd7d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 894.267213] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.280583] env[68285]: DEBUG nova.network.neutron [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Updating instance_info_cache with network_info: [{"id": "0a48ba66-2ec4-4dfe-94f1-ea0a9f494005", "address": "fa:16:3e:69:f9:27", "network": {"id": "e47fbb13-1f41-4b39-bc77-8f54dd2fbc8b", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-426739278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd0cb899a56c409b8210dfc378cd6908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5215e5b-294b-4e8c-bd06-355e9955ab1d", "external-id": "nsx-vlan-transportzone-529", "segmentation_id": 529, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a48ba66-2e", "ovs_interfaceid": "0a48ba66-2ec4-4dfe-94f1-ea0a9f494005", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.431050] env[68285]: DEBUG oslo_vmware.api [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891389, 'name': RemoveSnapshot_Task, 'duration_secs': 0.685087} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.431050] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 894.484232] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891390, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478532} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.484489] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 3e656d8d-bd06-4886-9424-4ed76b98aae9/3e656d8d-bd06-4886-9424-4ed76b98aae9.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 894.484695] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 894.484937] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-28df15a9-ca52-4196-bcf4-70956cf5fe66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.490787] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 894.490787] env[68285]: value = "task-2891391" [ 894.490787] env[68285]: _type = "Task" [ 894.490787] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.499710] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891391, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.598665] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d53c8128-7d9f-4e67-bb59-aa3fc722ca58 tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Lock "753bb2f7-bf0a-401e-81af-93982558d3b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.892s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.618643] env[68285]: DEBUG nova.compute.manager [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 894.618873] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 894.620463] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b473b852-e435-4d13-9b3f-692dc50a40ad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.628214] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 894.628562] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51cfc098-dde2-410f-96bc-ad3c6f5f4a64 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.635518] env[68285]: DEBUG oslo_vmware.api [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 894.635518] env[68285]: value = "task-2891392" [ 894.635518] env[68285]: _type = "Task" [ 894.635518] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.643929] env[68285]: DEBUG oslo_vmware.api [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891392, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.650326] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance c7ab28c3-a316-4685-b876-a0e7c657ec35 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 894.650654] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Total usable vcpus: 48, total allocated vcpus: 22 {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 894.651282] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4864MB phys_disk=200GB used_disk=22GB total_vcpus=48 used_vcpus=22 pci_stats=[] {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 894.785028] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Releasing lock "refresh_cache-631fe0ee-73a6-48c5-9a14-f6a00d2c2942" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.785264] env[68285]: DEBUG nova.compute.manager [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Instance network_info: |[{"id": "0a48ba66-2ec4-4dfe-94f1-ea0a9f494005", "address": "fa:16:3e:69:f9:27", "network": {"id": "e47fbb13-1f41-4b39-bc77-8f54dd2fbc8b", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-426739278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd0cb899a56c409b8210dfc378cd6908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5215e5b-294b-4e8c-bd06-355e9955ab1d", "external-id": "nsx-vlan-transportzone-529", "segmentation_id": 529, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a48ba66-2e", "ovs_interfaceid": "0a48ba66-2ec4-4dfe-94f1-ea0a9f494005", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 894.786048] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:f9:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5215e5b-294b-4e8c-bd06-355e9955ab1d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0a48ba66-2ec4-4dfe-94f1-ea0a9f494005', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 894.794556] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Creating folder: Project 
(bd0cb899a56c409b8210dfc378cd6908). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 894.797466] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc5d4c65-e37e-4ef9-b70e-ed97ecc48046 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.809191] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Created folder: Project (bd0cb899a56c409b8210dfc378cd6908) in parent group-v580775. [ 894.809389] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Creating folder: Instances. Parent ref: group-v580888. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 894.812148] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dcf88159-5b1f-4ab0-aa3b-2ce99d0ea14d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.820603] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Created folder: Instances in parent group-v580888. [ 894.820849] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 894.821075] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 894.821263] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8c54325-9eb9-47e7-8e89-7aa16d54fd43 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.842997] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 894.842997] env[68285]: value = "task-2891395" [ 894.842997] env[68285]: _type = "Task" [ 894.842997] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.852764] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891395, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.936026] env[68285]: WARNING nova.compute.manager [None req-7c883f6b-d52e-45a5-933a-08236dbd10e7 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Image not found during snapshot: nova.exception.ImageNotFound: Image 1ccf110b-dc59-470f-846b-3e2f555b3297 could not be found. 
[ 895.005185] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891391, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073508} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.008089] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 895.009323] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d998bf-0f07-4dad-9def-e5c0608d6a34 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.033861] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 3e656d8d-bd06-4886-9424-4ed76b98aae9/3e656d8d-bd06-4886-9424-4ed76b98aae9.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.036782] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb6dc3a3-fdc8-4a87-b4a4-3bb2ded51386 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.050861] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Acquiring lock "753bb2f7-bf0a-401e-81af-93982558d3b7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.051141] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Lock "753bb2f7-bf0a-401e-81af-93982558d3b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.051310] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Acquiring lock "753bb2f7-bf0a-401e-81af-93982558d3b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.051488] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Lock "753bb2f7-bf0a-401e-81af-93982558d3b7-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.051654] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Lock "753bb2f7-bf0a-401e-81af-93982558d3b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.053999] env[68285]: INFO nova.compute.manager [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Terminating instance [ 895.057694] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 895.057694] env[68285]: value = "task-2891396" [ 895.057694] env[68285]: _type = "Task" [ 895.057694] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.066053] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891396, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.102574] env[68285]: DEBUG nova.compute.manager [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 895.152050] env[68285]: DEBUG oslo_vmware.api [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891392, 'name': PowerOffVM_Task, 'duration_secs': 0.413427} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.152050] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 895.152050] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 895.152050] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5044461a-fa1b-4f40-98c6-25ddfa42cf25 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.158893] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Volume attach. Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 895.160328] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580885', 'volume_id': 'c8b8da28-c2a3-4a88-b6c4-807c92e3bf87', 'name': 'volume-c8b8da28-c2a3-4a88-b6c4-807c92e3bf87', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5b58896c-cb07-48c8-ace0-385486a3e19d', 'attached_at': '', 'detached_at': '', 'volume_id': 'c8b8da28-c2a3-4a88-b6c4-807c92e3bf87', 'serial': 'c8b8da28-c2a3-4a88-b6c4-807c92e3bf87'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 895.161231] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5393bf8d-4704-49bc-a226-d3823a3508d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.182383] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e3ac92-de8b-4952-84e4-66e16fbda36d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.209355] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] volume-c8b8da28-c2a3-4a88-b6c4-807c92e3bf87/volume-c8b8da28-c2a3-4a88-b6c4-807c92e3bf87.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.212280] env[68285]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de3f31ee-86d5-4ae5-9538-40f0dd14493a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.231207] env[68285]: DEBUG oslo_vmware.api [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Waiting for the task: (returnval){ [ 895.231207] env[68285]: value = "task-2891398" [ 895.231207] env[68285]: _type = "Task" [ 895.231207] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.236844] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270c9d39-725a-4868-b9c3-c0e4270e3353 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.240132] env[68285]: DEBUG nova.compute.manager [req-4b273431-d525-4784-b97a-2e3d7ab3fd4e req-0c7025f1-2058-409a-a98f-9cc5db92ea0d service nova] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Received event network-changed-0a48ba66-2ec4-4dfe-94f1-ea0a9f494005 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 895.240311] env[68285]: DEBUG nova.compute.manager [req-4b273431-d525-4784-b97a-2e3d7ab3fd4e req-0c7025f1-2058-409a-a98f-9cc5db92ea0d service nova] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Refreshing instance network info cache due to event network-changed-0a48ba66-2ec4-4dfe-94f1-ea0a9f494005. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 895.241112] env[68285]: DEBUG oslo_concurrency.lockutils [req-4b273431-d525-4784-b97a-2e3d7ab3fd4e req-0c7025f1-2058-409a-a98f-9cc5db92ea0d service nova] Acquiring lock "refresh_cache-631fe0ee-73a6-48c5-9a14-f6a00d2c2942" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.241112] env[68285]: DEBUG oslo_concurrency.lockutils [req-4b273431-d525-4784-b97a-2e3d7ab3fd4e req-0c7025f1-2058-409a-a98f-9cc5db92ea0d service nova] Acquired lock "refresh_cache-631fe0ee-73a6-48c5-9a14-f6a00d2c2942" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.241112] env[68285]: DEBUG nova.network.neutron [req-4b273431-d525-4784-b97a-2e3d7ab3fd4e req-0c7025f1-2058-409a-a98f-9cc5db92ea0d service nova] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Refreshing network info cache for port 0a48ba66-2ec4-4dfe-94f1-ea0a9f494005 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 895.248068] env[68285]: DEBUG oslo_vmware.api [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Task: {'id': task-2891398, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.251291] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e898d7-7f47-4a5e-8a3d-1c5790482a72 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.289017] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aaef09c-449b-4102-8709-186bf66e3dd2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.289017] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 895.289017] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 895.289017] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Deleting the datastore file [datastore1] bda5b2fb-1875-4078-a4c1-f76f6abeaaf5 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.289017] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed0c5f93-2ef7-4840-951e-d9b1ce700e07 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.296671] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffd686c-1166-4d45-bb3c-6b8b67943ce5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.300595] env[68285]: DEBUG oslo_vmware.api [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 895.300595] env[68285]: value = "task-2891399" [ 895.300595] env[68285]: _type = "Task" [ 895.300595] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.312537] env[68285]: DEBUG nova.compute.provider_tree [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.318489] env[68285]: DEBUG oslo_vmware.api [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891399, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.353549] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891395, 'name': CreateVM_Task, 'duration_secs': 0.390831} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.354058] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 895.354786] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.354966] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.355298] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 895.355577] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-851982ec-ef0e-46dc-bf0d-166cc1071466 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.361029] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Waiting for the task: (returnval){ [ 895.361029] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5223a59d-2571-18af-c78e-3d8436a0c7db" [ 895.361029] env[68285]: _type = "Task" [ 895.361029] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.368660] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5223a59d-2571-18af-c78e-3d8436a0c7db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.560076] env[68285]: DEBUG nova.compute.manager [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 895.560076] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 895.560786] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc0c7bd-610c-44c5-beb7-5672e970e87c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.573035] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891396, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.575724] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 895.575848] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e25b7cc-4c84-490c-8987-ab6d59e8f7dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.582133] env[68285]: DEBUG oslo_vmware.api [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Waiting for the task: (returnval){ [ 895.582133] env[68285]: value = "task-2891400" [ 895.582133] env[68285]: _type = "Task" [ 895.582133] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.590100] env[68285]: DEBUG oslo_vmware.api [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891400, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.622390] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.741490] env[68285]: DEBUG oslo_vmware.api [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Task: {'id': task-2891398, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.813477] env[68285]: DEBUG oslo_vmware.api [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891399, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142227} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.820417] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 895.820417] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 895.820417] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 895.820417] env[68285]: INFO nova.compute.manager [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Took 1.20 seconds to destroy the instance on the hypervisor. [ 895.820417] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 895.821360] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 895.827033] env[68285]: DEBUG nova.compute.manager [-] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 895.827226] env[68285]: DEBUG nova.network.neutron [-] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 895.872085] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5223a59d-2571-18af-c78e-3d8436a0c7db, 'name': SearchDatastore_Task, 'duration_secs': 0.00875} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.872814] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.872973] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 895.873149] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.873364] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.873745] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.874021] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d4d4b41-f0a9-4d66-b9bb-ef72f997afa8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.885946] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.886172] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 895.886907] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cbc62b8-e590-4727-8be1-114927d6e89d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.892932] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Waiting for the task: (returnval){ [ 895.892932] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]520a58e2-d919-dc47-946a-eba30bd67ff7" [ 895.892932] env[68285]: _type = "Task" [ 895.892932] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.901567] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520a58e2-d919-dc47-946a-eba30bd67ff7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.070219] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891396, 'name': ReconfigVM_Task, 'duration_secs': 0.718839} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.070503] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 3e656d8d-bd06-4886-9424-4ed76b98aae9/3e656d8d-bd06-4886-9424-4ed76b98aae9.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 896.071164] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-300f0b01-32a6-4dbf-b098-c89384f76a88 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.078065] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 896.078065] env[68285]: value = "task-2891401" [ 896.078065] env[68285]: _type = "Task" [ 896.078065] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.091101] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891401, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.099014] env[68285]: DEBUG oslo_vmware.api [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891400, 'name': PowerOffVM_Task, 'duration_secs': 0.270878} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.099603] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 896.099798] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 896.100068] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-785a8641-d5db-43fb-8694-3856c7feeb2f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.179304] env[68285]: DEBUG oslo_concurrency.lockutils [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "324cc3e5-1c81-498e-b520-e9fca26013ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.179539] env[68285]: DEBUG oslo_concurrency.lockutils [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "324cc3e5-1c81-498e-b520-e9fca26013ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.179739] env[68285]: DEBUG oslo_concurrency.lockutils [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "324cc3e5-1c81-498e-b520-e9fca26013ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.179918] env[68285]: DEBUG oslo_concurrency.lockutils [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "324cc3e5-1c81-498e-b520-e9fca26013ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.180101] env[68285]: DEBUG oslo_concurrency.lockutils [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "324cc3e5-1c81-498e-b520-e9fca26013ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.182250] env[68285]: DEBUG 
nova.virt.vmwareapi.vmops [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 896.182469] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 896.182653] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Deleting the datastore file [datastore2] 753bb2f7-bf0a-401e-81af-93982558d3b7 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 896.183150] env[68285]: INFO nova.compute.manager [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Terminating instance [ 896.184418] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4fccf5a2-fbde-4498-8a19-4cf9073910c7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.192050] env[68285]: DEBUG oslo_vmware.api [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Waiting for the task: (returnval){ [ 896.192050] env[68285]: value = "task-2891403" [ 896.192050] env[68285]: _type = "Task" [ 896.192050] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.205018] env[68285]: DEBUG oslo_vmware.api [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891403, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.246224] env[68285]: DEBUG oslo_vmware.api [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Task: {'id': task-2891398, 'name': ReconfigVM_Task, 'duration_secs': 0.899679} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.246531] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Reconfigured VM instance instance-0000000d to attach disk [datastore2] volume-c8b8da28-c2a3-4a88-b6c4-807c92e3bf87/volume-c8b8da28-c2a3-4a88-b6c4-807c92e3bf87.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 896.254439] env[68285]: DEBUG nova.network.neutron [req-4b273431-d525-4784-b97a-2e3d7ab3fd4e req-0c7025f1-2058-409a-a98f-9cc5db92ea0d service nova] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Updated VIF entry in instance network info cache for port 0a48ba66-2ec4-4dfe-94f1-ea0a9f494005. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 896.254797] env[68285]: DEBUG nova.network.neutron [req-4b273431-d525-4784-b97a-2e3d7ab3fd4e req-0c7025f1-2058-409a-a98f-9cc5db92ea0d service nova] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Updating instance_info_cache with network_info: [{"id": "0a48ba66-2ec4-4dfe-94f1-ea0a9f494005", "address": "fa:16:3e:69:f9:27", "network": {"id": "e47fbb13-1f41-4b39-bc77-8f54dd2fbc8b", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-426739278-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd0cb899a56c409b8210dfc378cd6908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5215e5b-294b-4e8c-bd06-355e9955ab1d", "external-id": "nsx-vlan-transportzone-529", "segmentation_id": 529, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a48ba66-2e", "ovs_interfaceid": "0a48ba66-2ec4-4dfe-94f1-ea0a9f494005", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.256046] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dff204bb-58a6-4cf0-aa3b-7c677b114beb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.274163] env[68285]: DEBUG oslo_vmware.api [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Waiting for the task: (returnval){ [ 896.274163] env[68285]: value = "task-2891404" [ 896.274163] env[68285]: _type = "Task" [ 896.274163] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.287171] env[68285]: DEBUG oslo_vmware.api [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Task: {'id': task-2891404, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.328619] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 896.328619] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.797s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.328922] env[68285]: DEBUG oslo_concurrency.lockutils [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.555s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.329180] env[68285]: DEBUG oslo_concurrency.lockutils [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.331687] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.246s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.331919] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.334060] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.092s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.335023] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.336016] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.470s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.337738] env[68285]: INFO nova.compute.claims [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.364723] env[68285]: INFO nova.scheduler.client.report [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Deleted allocations for instance 0d13cc84-bbf2-4e8b-8344-d69acac6bd35 [ 896.366642] env[68285]: INFO nova.scheduler.client.report [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Deleted allocations for instance ee45231a-80f2-49b9-8bc7-03a0c920a668 [ 896.384206] env[68285]: INFO nova.scheduler.client.report [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Deleted allocations for instance 1c42043d-f8db-4cb9-8147-48d0d32c982b [ 896.405276] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520a58e2-d919-dc47-946a-eba30bd67ff7, 'name': SearchDatastore_Task, 'duration_secs': 0.009218} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.407648] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-105ca2ee-3abe-47b7-821d-ac7f70d18384 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.414253] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Waiting for the task: (returnval){ [ 896.414253] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527ccc01-713b-c601-985d-716417db705a" [ 896.414253] env[68285]: _type = "Task" [ 896.414253] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.423985] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527ccc01-713b-c601-985d-716417db705a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.588060] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891401, 'name': Rename_Task, 'duration_secs': 0.23677} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.588580] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 896.588580] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a759724-157f-4f4d-b19a-673ef824307a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.594961] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 896.594961] env[68285]: value = "task-2891405" [ 896.594961] env[68285]: _type = "Task" [ 896.594961] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.603610] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891405, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.689807] env[68285]: DEBUG nova.compute.manager [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 896.689974] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 896.690860] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e2bd9b-81e8-47ae-93d9-d3a082a29d86 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.702322] env[68285]: DEBUG oslo_vmware.api [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Task: {'id': task-2891403, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168927} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.704224] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 896.704462] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 896.704689] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 896.704894] env[68285]: INFO nova.compute.manager [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Took 1.15 seconds to destroy the instance on the hypervisor. [ 896.705147] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 896.705372] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 896.705616] env[68285]: DEBUG nova.compute.manager [-] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 896.705728] env[68285]: DEBUG nova.network.neutron [-] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 896.707569] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-529e2b47-63a3-4ca1-a531-ecfb347d9144 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.714031] env[68285]: DEBUG oslo_vmware.api [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 896.714031] env[68285]: value = "task-2891406" [ 896.714031] env[68285]: _type = "Task" [ 896.714031] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.722696] env[68285]: DEBUG oslo_vmware.api [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891406, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.768371] env[68285]: DEBUG oslo_concurrency.lockutils [req-4b273431-d525-4784-b97a-2e3d7ab3fd4e req-0c7025f1-2058-409a-a98f-9cc5db92ea0d service nova] Releasing lock "refresh_cache-631fe0ee-73a6-48c5-9a14-f6a00d2c2942" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.784498] env[68285]: DEBUG oslo_vmware.api [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Task: {'id': task-2891404, 'name': ReconfigVM_Task, 'duration_secs': 0.156276} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.784692] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580885', 'volume_id': 'c8b8da28-c2a3-4a88-b6c4-807c92e3bf87', 'name': 'volume-c8b8da28-c2a3-4a88-b6c4-807c92e3bf87', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5b58896c-cb07-48c8-ace0-385486a3e19d', 'attached_at': '', 'detached_at': '', 'volume_id': 'c8b8da28-c2a3-4a88-b6c4-807c92e3bf87', 'serial': 'c8b8da28-c2a3-4a88-b6c4-807c92e3bf87'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 896.856385] env[68285]: DEBUG nova.network.neutron [-] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.873797] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9050ca66-2538-4325-9975-68fa87b96046 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "0d13cc84-bbf2-4e8b-8344-d69acac6bd35" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.246s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.879712] env[68285]: DEBUG oslo_concurrency.lockutils [None req-34189b0e-a590-4f0f-9b8e-6141e83953d7 tempest-ListServersNegativeTestJSON-1192129330 tempest-ListServersNegativeTestJSON-1192129330-project-member] Lock "ee45231a-80f2-49b9-8bc7-03a0c920a668" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.552s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.894068] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9f8a776-cbca-42d8-9919-cf3b3d1a1174 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "1c42043d-f8db-4cb9-8147-48d0d32c982b" 
"released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.188s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.924781] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527ccc01-713b-c601-985d-716417db705a, 'name': SearchDatastore_Task, 'duration_secs': 0.00983} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.925039] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.925203] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 631fe0ee-73a6-48c5-9a14-f6a00d2c2942/631fe0ee-73a6-48c5-9a14-f6a00d2c2942.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 896.925512] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76633f11-0978-40d4-aa90-9fd31aded730 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.933121] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Waiting for the task: (returnval){ [ 896.933121] env[68285]: value = "task-2891407" [ 896.933121] env[68285]: _type = "Task" [ 896.933121] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.941606] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891407, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.106114] env[68285]: DEBUG oslo_vmware.api [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891405, 'name': PowerOnVM_Task, 'duration_secs': 0.503446} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.106114] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 897.106114] env[68285]: INFO nova.compute.manager [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Took 8.30 seconds to spawn the instance on the hypervisor. [ 897.106114] env[68285]: DEBUG nova.compute.manager [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 897.106641] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3af4aa0-9c46-4f50-905e-94de0f6c4b1f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.225073] env[68285]: DEBUG oslo_vmware.api [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891406, 'name': PowerOffVM_Task, 'duration_secs': 0.195847} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.225379] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 897.225523] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 897.225773] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7cc2579-49b3-4477-b523-f598dfdafd89 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.270054] env[68285]: DEBUG nova.compute.manager [req-967f316b-1a20-4018-b919-2a04ccf75b14 req-24d9e68f-1e59-48d2-961e-71b22389af9e service nova] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Received event network-vif-deleted-5d280454-d5ca-4bfd-b516-72294e207a35 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 897.270054] env[68285]: DEBUG nova.compute.manager [req-967f316b-1a20-4018-b919-2a04ccf75b14 req-24d9e68f-1e59-48d2-961e-71b22389af9e service nova] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Received event network-vif-deleted-f2956a29-080d-46ea-92ed-5591a1c77685 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 897.270195] env[68285]: INFO 
nova.compute.manager [req-967f316b-1a20-4018-b919-2a04ccf75b14 req-24d9e68f-1e59-48d2-961e-71b22389af9e service nova] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Neutron deleted interface f2956a29-080d-46ea-92ed-5591a1c77685; detaching it from the instance and deleting it from the info cache [ 897.270314] env[68285]: DEBUG nova.network.neutron [req-967f316b-1a20-4018-b919-2a04ccf75b14 req-24d9e68f-1e59-48d2-961e-71b22389af9e service nova] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.295025] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 897.295025] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 897.295025] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Deleting the datastore file [datastore1] 324cc3e5-1c81-498e-b520-e9fca26013ef {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 897.295025] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5afa72f-1f1e-43a6-8963-dee861517336 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.300623] env[68285]: DEBUG oslo_vmware.api [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 897.300623] env[68285]: value = "task-2891409" [ 897.300623] env[68285]: _type = "Task" [ 897.300623] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.310131] env[68285]: DEBUG oslo_vmware.api [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891409, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.361736] env[68285]: INFO nova.compute.manager [-] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Took 1.53 seconds to deallocate network for instance. [ 897.446246] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891407, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.545698] env[68285]: DEBUG nova.network.neutron [-] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.629144] env[68285]: INFO nova.compute.manager [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Took 43.36 seconds to build instance. [ 897.773137] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd32af06-1910-4787-a4e0-10d045595869 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.783011] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8784a17b-9b5a-496e-9c54-78d69f266a6f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.811090] env[68285]: DEBUG oslo_vmware.api [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891409, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.327192} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.811569] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 897.811993] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 897.812193] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 897.812375] env[68285]: INFO nova.compute.manager [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Took 1.12 seconds to destroy the instance on the hypervisor. [ 897.812700] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 897.829404] env[68285]: DEBUG nova.compute.manager [-] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 897.829517] env[68285]: DEBUG nova.network.neutron [-] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 897.832021] env[68285]: DEBUG nova.compute.manager [req-967f316b-1a20-4018-b919-2a04ccf75b14 req-24d9e68f-1e59-48d2-961e-71b22389af9e service nova] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Detach interface failed, port_id=f2956a29-080d-46ea-92ed-5591a1c77685, reason: Instance 753bb2f7-bf0a-401e-81af-93982558d3b7 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 897.852117] env[68285]: DEBUG nova.objects.instance [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Lazy-loading 'flavor' on Instance uuid 5b58896c-cb07-48c8-ace0-385486a3e19d {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 897.869175] env[68285]: DEBUG oslo_concurrency.lockutils [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.948378] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891407, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.749269} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.948786] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 631fe0ee-73a6-48c5-9a14-f6a00d2c2942/631fe0ee-73a6-48c5-9a14-f6a00d2c2942.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 897.949860] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 897.949860] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-138ac4ad-63f8-40c5-b6fa-3419d08e5b9d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.959220] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Waiting for the task: (returnval){ [ 897.959220] env[68285]: value = "task-2891410" [ 897.959220] env[68285]: _type = "Task" [ 897.959220] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.968029] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891410, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.997446] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c196a3ee-5021-4a4e-904e-8f7e3a754a07 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.008173] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcae9eca-ccba-47f7-b52e-7ec710251574 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.040672] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31911c36-03c3-495e-a421-94243cd11695 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.047829] env[68285]: INFO nova.compute.manager [-] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Took 1.34 seconds to deallocate network for instance. 
[ 898.052959] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4aac05-f087-4696-986b-5ccd7292e845 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.071425] env[68285]: DEBUG nova.compute.provider_tree [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.134194] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9b0b02d3-6a6b-4ae7-ba36-52b617831d46 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "3e656d8d-bd06-4886-9424-4ed76b98aae9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.227s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.358011] env[68285]: DEBUG oslo_concurrency.lockutils [None req-01244775-d759-4662-a7cf-4b39f491980d tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Lock "5b58896c-cb07-48c8-ace0-385486a3e19d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.825s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.469870] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891410, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.261834} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.470189] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 898.471045] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a9dd34-008e-4af9-8070-a1d91987ca01 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.492991] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 631fe0ee-73a6-48c5-9a14-f6a00d2c2942/631fe0ee-73a6-48c5-9a14-f6a00d2c2942.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 898.493292] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-784f162f-a1b9-4267-9434-03f919e4ed09 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.513170] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Waiting for the task: (returnval){ [ 898.513170] env[68285]: value = "task-2891411" [ 898.513170] env[68285]: _type = "Task" [ 898.513170] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.521065] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891411, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.547858] env[68285]: DEBUG oslo_concurrency.lockutils [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Acquiring lock "5b58896c-cb07-48c8-ace0-385486a3e19d" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.548122] env[68285]: DEBUG oslo_concurrency.lockutils [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Lock "5b58896c-cb07-48c8-ace0-385486a3e19d" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.561934] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.574771] env[68285]: DEBUG nova.scheduler.client.report [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 898.642660] env[68285]: DEBUG nova.compute.manager [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 899.031762] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891411, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.051996] env[68285]: INFO nova.compute.manager [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Detaching volume c8b8da28-c2a3-4a88-b6c4-807c92e3bf87 [ 899.081087] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.745s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.081590] env[68285]: DEBUG nova.compute.manager [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 899.090328] env[68285]: DEBUG oslo_concurrency.lockutils [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.309s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.090328] env[68285]: DEBUG oslo_concurrency.lockutils [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.090924] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.258s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.096042] env[68285]: INFO nova.compute.claims [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 899.100215] env[68285]: INFO nova.virt.block_device [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Attempting to driver detach volume c8b8da28-c2a3-4a88-b6c4-807c92e3bf87 from mountpoint /dev/sdb [ 899.100591] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Volume detach. 
Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 899.100899] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580885', 'volume_id': 'c8b8da28-c2a3-4a88-b6c4-807c92e3bf87', 'name': 'volume-c8b8da28-c2a3-4a88-b6c4-807c92e3bf87', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5b58896c-cb07-48c8-ace0-385486a3e19d', 'attached_at': '', 'detached_at': '', 'volume_id': 'c8b8da28-c2a3-4a88-b6c4-807c92e3bf87', 'serial': 'c8b8da28-c2a3-4a88-b6c4-807c92e3bf87'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 899.103173] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53fd4d12-04b0-4621-8cf1-113316b9dce2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.138784] env[68285]: DEBUG nova.network.neutron [-] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.141914] env[68285]: INFO nova.scheduler.client.report [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Deleted allocations for instance fe9a8a13-73ec-4556-a62c-cc49fd01f539 [ 899.146190] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698d73df-6cee-4654-9a56-1b96fd3aaed5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.166042] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3e7101-da33-407d-8d66-50c485a1f817 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.169963] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.189245] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c4a8a2-371e-4141-ad23-a84ee3287448 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.206322] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] The volume has not been displaced from its original location: [datastore2] volume-c8b8da28-c2a3-4a88-b6c4-807c92e3bf87/volume-c8b8da28-c2a3-4a88-b6c4-807c92e3bf87.vmdk. No consolidation needed. 
{{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 899.212583] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Reconfiguring VM instance instance-0000000d to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 899.212966] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0020b980-a451-46aa-91c3-1092b7a20a3b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.235073] env[68285]: DEBUG oslo_vmware.api [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Waiting for the task: (returnval){ [ 899.235073] env[68285]: value = "task-2891412" [ 899.235073] env[68285]: _type = "Task" [ 899.235073] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.245776] env[68285]: DEBUG oslo_vmware.api [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Task: {'id': task-2891412, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.481805] env[68285]: DEBUG nova.compute.manager [req-8fd74810-8f25-4e5f-a7ce-b5f76832c597 req-59c7cc6e-1ec2-4e7a-8ac8-ca2559f0c5d8 service nova] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Received event network-vif-deleted-474e30d6-abd2-42ca-a4e9-42f115b28cad {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 899.482194] env[68285]: DEBUG nova.compute.manager [req-8fd74810-8f25-4e5f-a7ce-b5f76832c597 req-59c7cc6e-1ec2-4e7a-8ac8-ca2559f0c5d8 service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Received event network-changed-f41109eb-9884-4723-8695-fdaae26703db {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 899.482369] env[68285]: DEBUG nova.compute.manager [req-8fd74810-8f25-4e5f-a7ce-b5f76832c597 req-59c7cc6e-1ec2-4e7a-8ac8-ca2559f0c5d8 service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Refreshing instance network info cache due to event network-changed-f41109eb-9884-4723-8695-fdaae26703db. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 899.482674] env[68285]: DEBUG oslo_concurrency.lockutils [req-8fd74810-8f25-4e5f-a7ce-b5f76832c597 req-59c7cc6e-1ec2-4e7a-8ac8-ca2559f0c5d8 service nova] Acquiring lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.482883] env[68285]: DEBUG oslo_concurrency.lockutils [req-8fd74810-8f25-4e5f-a7ce-b5f76832c597 req-59c7cc6e-1ec2-4e7a-8ac8-ca2559f0c5d8 service nova] Acquired lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.483091] env[68285]: DEBUG nova.network.neutron [req-8fd74810-8f25-4e5f-a7ce-b5f76832c597 req-59c7cc6e-1ec2-4e7a-8ac8-ca2559f0c5d8 service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Refreshing network info cache for port f41109eb-9884-4723-8695-fdaae26703db {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 899.525452] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891411, 'name': ReconfigVM_Task, 'duration_secs': 0.729988} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.525600] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 631fe0ee-73a6-48c5-9a14-f6a00d2c2942/631fe0ee-73a6-48c5-9a14-f6a00d2c2942.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 899.526267] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b385db92-915d-4e0b-8cc1-a504efa443fd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.534545] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Waiting for the task: (returnval){ [ 899.534545] env[68285]: value = "task-2891413" [ 899.534545] env[68285]: _type = "Task" [ 899.534545] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.546835] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891413, 'name': Rename_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.605968] env[68285]: DEBUG nova.compute.utils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 899.610207] env[68285]: DEBUG nova.compute.manager [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 899.610207] env[68285]: DEBUG nova.network.neutron [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 899.653494] env[68285]: INFO nova.compute.manager [-] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Took 1.82 seconds to deallocate network for instance. [ 899.662618] env[68285]: DEBUG oslo_concurrency.lockutils [None req-072b337a-52dc-42c4-a66b-a51dea991d13 tempest-ServersAdminNegativeTestJSON-1568229079 tempest-ServersAdminNegativeTestJSON-1568229079-project-member] Lock "fe9a8a13-73ec-4556-a62c-cc49fd01f539" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.361s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.692633] env[68285]: DEBUG nova.policy [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ee600b2186b4c79b1104df6953b5f92', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cd7990c4a58b4c0eb1b5a6474baefbc8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 899.749456] env[68285]: DEBUG oslo_vmware.api [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Task: {'id': task-2891412, 'name': ReconfigVM_Task, 'duration_secs': 0.388399} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.749908] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Reconfigured VM instance instance-0000000d to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 899.754609] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc6768d8-5232-401c-b019-e462c86be66f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.772330] env[68285]: DEBUG oslo_vmware.api [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Waiting for the task: (returnval){ [ 899.772330] env[68285]: value = "task-2891414" [ 899.772330] env[68285]: _type = "Task" [ 899.772330] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.783391] env[68285]: DEBUG oslo_vmware.api [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Task: {'id': task-2891414, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.045626] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891413, 'name': Rename_Task, 'duration_secs': 0.257905} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.045949] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 900.046208] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-26a73dd6-960c-41a5-80f9-602460b09d17 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.053752] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "3e656d8d-bd06-4886-9424-4ed76b98aae9" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.054054] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "3e656d8d-bd06-4886-9424-4ed76b98aae9" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.054363] env[68285]: INFO nova.compute.manager [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Rebooting instance [ 900.055643] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Waiting for the task: (returnval){ [ 900.055643] env[68285]: value = "task-2891415" [ 900.055643] env[68285]: _type = "Task" [ 900.055643] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.066931] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891415, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.112080] env[68285]: DEBUG nova.compute.manager [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 900.161778] env[68285]: DEBUG oslo_concurrency.lockutils [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.287135] env[68285]: DEBUG oslo_vmware.api [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Task: {'id': task-2891414, 'name': ReconfigVM_Task, 'duration_secs': 0.150275} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.287479] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580885', 'volume_id': 'c8b8da28-c2a3-4a88-b6c4-807c92e3bf87', 'name': 'volume-c8b8da28-c2a3-4a88-b6c4-807c92e3bf87', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5b58896c-cb07-48c8-ace0-385486a3e19d', 'attached_at': '', 'detached_at': '', 'volume_id': 'c8b8da28-c2a3-4a88-b6c4-807c92e3bf87', 'serial': 'c8b8da28-c2a3-4a88-b6c4-807c92e3bf87'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 900.362149] env[68285]: DEBUG nova.network.neutron [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Successfully created port: 2aa2446c-f5b4-4511-9c9f-9bd2e76047a8 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 900.378528] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "8b473550-4a40-48a5-9e1c-7c48df828e61" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.378812] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "8b473550-4a40-48a5-9e1c-7c48df828e61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.491593] env[68285]: DEBUG nova.network.neutron [req-8fd74810-8f25-4e5f-a7ce-b5f76832c597 req-59c7cc6e-1ec2-4e7a-8ac8-ca2559f0c5d8 service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Updated VIF entry in instance network info cache for port f41109eb-9884-4723-8695-fdaae26703db. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 900.492210] env[68285]: DEBUG nova.network.neutron [req-8fd74810-8f25-4e5f-a7ce-b5f76832c597 req-59c7cc6e-1ec2-4e7a-8ac8-ca2559f0c5d8 service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Updating instance_info_cache with network_info: [{"id": "f41109eb-9884-4723-8695-fdaae26703db", "address": "fa:16:3e:d2:d7:99", "network": {"id": "24216eb9-67ca-4587-9dce-0239c567b87e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-463162314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0df6f9cd11e4cbea0a5d25e546ade05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf41109eb-98", "ovs_interfaceid": "f41109eb-9884-4723-8695-fdaae26703db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.570677] env[68285]: DEBUG oslo_vmware.api [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891415, 'name': PowerOnVM_Task, 'duration_secs': 0.494527} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.574092] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 900.575026] env[68285]: INFO nova.compute.manager [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Took 9.50 seconds to spawn the instance on the hypervisor. 
[ 900.575366] env[68285]: DEBUG nova.compute.manager [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 900.576865] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4100ef4-b26b-41d6-b91e-86a938801f15 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.587042] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.699868] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6ea2ab-5a71-4291-85ee-68dd8c4f310e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.708679] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8abee4-24bb-441b-bf6f-16942fa26423 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.749619] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9c4545-e0e2-4c1b-9914-fd47a2b33446 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.758118] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77786ce6-521e-4420-a0bc-8da0ebbb2e33 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.771525] env[68285]: DEBUG nova.compute.provider_tree [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.861742] env[68285]: DEBUG nova.objects.instance [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Lazy-loading 'flavor' on Instance uuid 5b58896c-cb07-48c8-ace0-385486a3e19d {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 900.996885] env[68285]: DEBUG oslo_concurrency.lockutils [req-8fd74810-8f25-4e5f-a7ce-b5f76832c597 req-59c7cc6e-1ec2-4e7a-8ac8-ca2559f0c5d8 service nova] Releasing lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.996885] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquired lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.996885] env[68285]: DEBUG 
nova.network.neutron [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.105198] env[68285]: INFO nova.compute.manager [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Took 46.21 seconds to build instance. [ 901.122108] env[68285]: DEBUG nova.compute.manager [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 901.156748] env[68285]: DEBUG nova.virt.hardware [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 901.158868] env[68285]: DEBUG nova.virt.hardware [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 901.159026] env[68285]: DEBUG nova.virt.hardware [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 901.159214] env[68285]: DEBUG nova.virt.hardware [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 901.159741] env[68285]: DEBUG nova.virt.hardware [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 901.159741] env[68285]: DEBUG nova.virt.hardware [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 901.159741] env[68285]: DEBUG nova.virt.hardware [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 901.159887] env[68285]: DEBUG nova.virt.hardware [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 901.160807] env[68285]: DEBUG nova.virt.hardware [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 901.160807] env[68285]: DEBUG nova.virt.hardware [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 901.160807] env[68285]: DEBUG nova.virt.hardware [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 901.162021] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f649f53-9975-40dc-bab3-2d1e8f692872 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.169840] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6d510a-7777-4022-82ad-da2e3fc38618 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.275720] env[68285]: DEBUG nova.scheduler.client.report [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 901.339637] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "c690490f-9278-4595-8286-d4fd970bbc39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.339933] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "c690490f-9278-4595-8286-d4fd970bbc39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.607903] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ada1fd5-7d4a-468a-afc9-6dc6ba6bae41 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Lock "631fe0ee-73a6-48c5-9a14-f6a00d2c2942" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.165s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.781368] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.781684] env[68285]: DEBUG nova.compute.manager [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 901.788017] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.239s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.788017] env[68285]: INFO nova.compute.claims [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 901.870704] env[68285]: DEBUG oslo_concurrency.lockutils [None req-db09a48b-861d-4085-9377-ff21211777a6 tempest-VolumesAssistedSnapshotsTest-885836439 tempest-VolumesAssistedSnapshotsTest-885836439-project-admin] Lock "5b58896c-cb07-48c8-ace0-385486a3e19d" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.322s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.884390] env[68285]: DEBUG nova.network.neutron [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Updating instance_info_cache with network_info: [{"id": "f41109eb-9884-4723-8695-fdaae26703db", "address": "fa:16:3e:d2:d7:99", "network": {"id": "24216eb9-67ca-4587-9dce-0239c567b87e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-463162314-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0df6f9cd11e4cbea0a5d25e546ade05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf41109eb-98", "ovs_interfaceid": "f41109eb-9884-4723-8695-fdaae26703db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.112182] env[68285]: DEBUG nova.compute.manager [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 902.301569] env[68285]: DEBUG nova.compute.utils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 902.302033] env[68285]: DEBUG nova.compute.manager [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 902.302033] env[68285]: DEBUG nova.network.neutron [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 902.391282] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Releasing lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 902.434858] env[68285]: DEBUG nova.policy [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd27450be410458ba1f009b191126755', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f891a62d3df3400fa53ac94230bcb8a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 902.450100] env[68285]: DEBUG nova.compute.manager [req-dc33a6e1-87fa-400d-9151-799aeecad933 req-f3c4e5bb-21ff-4b77-ae0f-2078a502c022 service nova] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Received event network-vif-plugged-2aa2446c-f5b4-4511-9c9f-9bd2e76047a8 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 902.450318] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc33a6e1-87fa-400d-9151-799aeecad933 req-f3c4e5bb-21ff-4b77-ae0f-2078a502c022 service nova] Acquiring lock "940e0328-970d-4f49-a102-d8a00b8c299b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.450520] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc33a6e1-87fa-400d-9151-799aeecad933 req-f3c4e5bb-21ff-4b77-ae0f-2078a502c022 service nova] Lock "940e0328-970d-4f49-a102-d8a00b8c299b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.450684] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc33a6e1-87fa-400d-9151-799aeecad933 req-f3c4e5bb-21ff-4b77-ae0f-2078a502c022 service nova] Lock "940e0328-970d-4f49-a102-d8a00b8c299b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.450847] env[68285]: DEBUG nova.compute.manager [req-dc33a6e1-87fa-400d-9151-799aeecad933 req-f3c4e5bb-21ff-4b77-ae0f-2078a502c022 service nova] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] No waiting events found dispatching network-vif-plugged-2aa2446c-f5b4-4511-9c9f-9bd2e76047a8 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 902.451015] env[68285]: WARNING nova.compute.manager 
[req-dc33a6e1-87fa-400d-9151-799aeecad933 req-f3c4e5bb-21ff-4b77-ae0f-2078a502c022 service nova] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Received unexpected event network-vif-plugged-2aa2446c-f5b4-4511-9c9f-9bd2e76047a8 for instance with vm_state building and task_state spawning. [ 902.556449] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "12fad42a-1011-4563-b11f-7b141b2a1670" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.556722] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "12fad42a-1011-4563-b11f-7b141b2a1670" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.556931] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "12fad42a-1011-4563-b11f-7b141b2a1670-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.557145] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "12fad42a-1011-4563-b11f-7b141b2a1670-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.557314] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "12fad42a-1011-4563-b11f-7b141b2a1670-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.559549] env[68285]: INFO nova.compute.manager [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Terminating instance [ 902.584281] env[68285]: DEBUG nova.network.neutron [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Successfully updated port: 2aa2446c-f5b4-4511-9c9f-9bd2e76047a8 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 902.644646] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 
tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.805237] env[68285]: DEBUG nova.compute.manager [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 902.898694] env[68285]: DEBUG nova.compute.manager [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 902.899817] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76addc8-9f8e-4b58-a5ea-ae9d15e72663 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.009017] env[68285]: DEBUG nova.network.neutron [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Successfully created port: 567381f7-5f78-4920-beb9-db0ef3479244 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 903.068092] env[68285]: DEBUG nova.compute.manager [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 903.068092] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 903.068092] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dffb8085-98f4-41a5-8f6f-0c2eafded63a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.079932] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 903.080330] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5cf39371-9a56-4217-b50f-4cb0d4668b12 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.091218] env[68285]: DEBUG oslo_vmware.api [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 903.091218] env[68285]: value = "task-2891417" [ 903.091218] env[68285]: _type = "Task" [ 903.091218] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.091218] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Acquiring lock "refresh_cache-940e0328-970d-4f49-a102-d8a00b8c299b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.091218] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Acquired lock "refresh_cache-940e0328-970d-4f49-a102-d8a00b8c299b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.091218] env[68285]: DEBUG nova.network.neutron [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 903.106564] env[68285]: DEBUG oslo_vmware.api [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891417, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.483727] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801a3b3b-a373-4fee-947e-dc994a4c0fe1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.493196] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354e3339-430d-4a00-b587-b99c0d37587c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.531821] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dfae4d6-5e38-4fb1-8cff-4f9d85dd1e49 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.543129] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9428a57-c1e9-447e-89c2-90f42831263c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.563615] env[68285]: DEBUG nova.compute.provider_tree [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.604147] env[68285]: DEBUG oslo_vmware.api [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891417, 'name': PowerOffVM_Task, 'duration_secs': 0.292844} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.604469] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 903.604678] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 903.605284] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-328cca5b-f169-4781-8fad-521d2341e3c7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.616636] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "f13ad5e7-341f-4475-b334-2144b0923e3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.616977] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "f13ad5e7-341f-4475-b334-2144b0923e3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.649053] env[68285]: DEBUG nova.network.neutron [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 903.687536] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 903.687782] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 903.687962] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Deleting the datastore file [datastore1] 12fad42a-1011-4563-b11f-7b141b2a1670 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 903.688540] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bdc51a79-7b55-4346-b8c6-2a8ad4345614 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.697913] env[68285]: DEBUG oslo_vmware.api [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 903.697913] env[68285]: value = "task-2891419" [ 903.697913] env[68285]: _type = "Task" [ 903.697913] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.705966] env[68285]: DEBUG oslo_vmware.api [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891419, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.822231] env[68285]: DEBUG nova.compute.manager [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 903.852143] env[68285]: DEBUG nova.virt.hardware [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:52:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='5d08af6e-040e-4fac-974b-cfa5ed6c710a',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1109178364',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 903.852418] env[68285]: DEBUG nova.virt.hardware [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 903.852536] env[68285]: DEBUG nova.virt.hardware [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 903.852709] env[68285]: DEBUG nova.virt.hardware [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 903.852848] env[68285]: DEBUG nova.virt.hardware [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 903.852987] env[68285]: DEBUG nova.virt.hardware [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 903.853296] env[68285]: DEBUG nova.virt.hardware [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 903.854310] env[68285]: DEBUG nova.virt.hardware [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 903.854310] env[68285]: DEBUG 
nova.virt.hardware [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 903.854310] env[68285]: DEBUG nova.virt.hardware [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 903.854310] env[68285]: DEBUG nova.virt.hardware [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 903.855812] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b696ad0-a369-4b29-b831-febfe750aafb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.865020] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c830d94-19b9-4e52-a547-46c164ffce80 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.923125] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1569b897-d1b8-47ec-baea-6eb6feee2f75 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.931306] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Doing hard reboot of VM {{(pid=68285) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 903.931598] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-d7be4bf6-2a28-47a9-8fbf-1efd6b142fea {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.940203] env[68285]: DEBUG oslo_vmware.api [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 903.940203] env[68285]: value = "task-2891420" [ 903.940203] env[68285]: _type = "Task" [ 903.940203] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.952380] env[68285]: DEBUG oslo_vmware.api [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891420, 'name': ResetVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.972681] env[68285]: DEBUG nova.network.neutron [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Updating instance_info_cache with network_info: [{"id": "2aa2446c-f5b4-4511-9c9f-9bd2e76047a8", "address": "fa:16:3e:d2:68:80", "network": {"id": "fab946b6-8ac4-446d-9fa8-3051a38ddf60", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-584547690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd7990c4a58b4c0eb1b5a6474baefbc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2aa2446c-f5", "ovs_interfaceid": "2aa2446c-f5b4-4511-9c9f-9bd2e76047a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.038345] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Acquiring lock "631fe0ee-73a6-48c5-9a14-f6a00d2c2942" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.039192] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Lock "631fe0ee-73a6-48c5-9a14-f6a00d2c2942" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.039192] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Acquiring lock "631fe0ee-73a6-48c5-9a14-f6a00d2c2942-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.039347] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Lock "631fe0ee-73a6-48c5-9a14-f6a00d2c2942-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.039858] env[68285]: DEBUG oslo_concurrency.lockutils [None 
req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Lock "631fe0ee-73a6-48c5-9a14-f6a00d2c2942-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.045282] env[68285]: INFO nova.compute.manager [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Terminating instance [ 904.065522] env[68285]: DEBUG nova.scheduler.client.report [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 904.216071] env[68285]: DEBUG oslo_vmware.api [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891419, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289278} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.216538] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 904.216850] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 904.217151] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 904.217442] env[68285]: INFO nova.compute.manager [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Took 1.15 seconds to destroy the instance on the hypervisor. 
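The entries above repeat one pattern several times: an oslo.vmware call starts a vCenter task (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task), and wait_for_task then polls that task, logging "progress is 0%" until it reports "completed successfully". The following is a minimal stdlib-only Python sketch of that polling loop, not the oslo.vmware implementation; get_task_info and task_ref here are hypothetical stand-ins for the real SOAP call and managed object reference.

import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, task_ref, interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        # Hypothetical helper: returns e.g. {'state': 'running', 'progress': 0}
        info = get_task_info(task_ref)
        if info['state'] == 'success':
            # Corresponds to the "... completed successfully" lines in the log
            return info
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        # Corresponds to the repeated "progress is 0%" poll entries
        time.sleep(interval)
    raise TimeoutError(f"task {task_ref!r} did not complete within {timeout}s")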
[ 904.217819] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 904.218597] env[68285]: DEBUG nova.compute.manager [-] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 904.218597] env[68285]: DEBUG nova.network.neutron [-] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 904.456565] env[68285]: DEBUG oslo_vmware.api [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891420, 'name': ResetVM_Task, 'duration_secs': 0.126779} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.456870] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Did hard reboot of VM {{(pid=68285) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 904.457509] env[68285]: DEBUG nova.compute.manager [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.458584] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1491be05-b9de-421d-a55d-4406004ccd43 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.475934] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Releasing lock "refresh_cache-940e0328-970d-4f49-a102-d8a00b8c299b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.476466] env[68285]: DEBUG nova.compute.manager [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Instance network_info: |[{"id": "2aa2446c-f5b4-4511-9c9f-9bd2e76047a8", "address": "fa:16:3e:d2:68:80", "network": {"id": "fab946b6-8ac4-446d-9fa8-3051a38ddf60", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-584547690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"cd7990c4a58b4c0eb1b5a6474baefbc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2aa2446c-f5", "ovs_interfaceid": "2aa2446c-f5b4-4511-9c9f-9bd2e76047a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 904.477157] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:68:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3ccbdbb-8b49-4a26-913f-2a448b72280f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2aa2446c-f5b4-4511-9c9f-9bd2e76047a8', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 904.493021] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Creating folder: Project (cd7990c4a58b4c0eb1b5a6474baefbc8). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 904.493021] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2c02856-d4a4-483b-9dd9-4a323fb0de15 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.502462] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Created folder: Project (cd7990c4a58b4c0eb1b5a6474baefbc8) in parent group-v580775. [ 904.502710] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Creating folder: Instances. Parent ref: group-v580891. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 904.502983] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c1640d4-e0b0-45ae-9452-f0682e6f48ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.516207] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Created folder: Instances in parent group-v580891. [ 904.516207] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 904.516859] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 904.517166] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9bcf1fa-dd37-4961-83dd-001dcfc5c656 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.538101] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 904.538101] env[68285]: value = "task-2891423" [ 904.538101] env[68285]: _type = "Task" [ 904.538101] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.546639] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891423, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.551029] env[68285]: DEBUG nova.compute.manager [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 904.551029] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 904.551029] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508e19a6-2746-4efc-abe1-37aa7d6de204 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.557961] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 904.558273] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d93192e-856a-44f1-9d3f-026dcc243b23 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.566238] env[68285]: DEBUG oslo_vmware.api [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Waiting for the task: (returnval){ [ 904.566238] env[68285]: value = "task-2891424" [ 904.566238] env[68285]: _type = "Task" [ 904.566238] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.576928] env[68285]: DEBUG oslo_vmware.api [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891424, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.579674] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.794s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.580673] env[68285]: DEBUG nova.compute.manager [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 904.582875] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.303s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.584313] env[68285]: INFO nova.compute.claims [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 904.738763] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Acquiring lock "14285f6e-10a4-4077-a666-3c8d0cc1b87c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.739056] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Lock "14285f6e-10a4-4077-a666-3c8d0cc1b87c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.973497] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cd5e1df8-5ead-418c-8f10-a1eba80fde0c tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "3e656d8d-bd06-4886-9424-4ed76b98aae9" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.919s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.049834] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891423, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.078430] env[68285]: DEBUG oslo_vmware.api [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891424, 'name': PowerOffVM_Task, 'duration_secs': 0.296598} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.078725] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 905.078885] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 905.079147] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0dfef7ce-f93f-4b42-86f7-ead1eb610ac6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.090214] env[68285]: DEBUG nova.compute.utils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 905.093921] env[68285]: DEBUG nova.compute.manager [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 905.093921] env[68285]: DEBUG nova.network.neutron [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 905.147225] env[68285]: DEBUG nova.policy [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fd0582abf8e4fff8e6f8316ba430988', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07b5865cc5804d8d98073e5d0c1449aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 905.151862] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 905.152117] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 905.152244] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Deleting the datastore file [datastore2] 631fe0ee-73a6-48c5-9a14-f6a00d2c2942 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 905.152488] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22155da7-ca2f-4b52-be65-0eac2b8e3a86 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.159427] env[68285]: DEBUG oslo_vmware.api [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Waiting for the task: (returnval){ [ 905.159427] env[68285]: value = "task-2891426" [ 905.159427] env[68285]: _type = "Task" [ 905.159427] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.168353] env[68285]: DEBUG oslo_vmware.api [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891426, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.219588] env[68285]: DEBUG nova.compute.manager [req-97fb0d59-c3ae-4922-9acf-8d633d44fda0 req-d7ab2c3e-5d1d-4072-a50d-af91b1f95048 service nova] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Received event network-changed-2aa2446c-f5b4-4511-9c9f-9bd2e76047a8 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 905.219802] env[68285]: DEBUG nova.compute.manager [req-97fb0d59-c3ae-4922-9acf-8d633d44fda0 req-d7ab2c3e-5d1d-4072-a50d-af91b1f95048 service nova] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Refreshing instance network info cache due to event network-changed-2aa2446c-f5b4-4511-9c9f-9bd2e76047a8. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 905.220040] env[68285]: DEBUG oslo_concurrency.lockutils [req-97fb0d59-c3ae-4922-9acf-8d633d44fda0 req-d7ab2c3e-5d1d-4072-a50d-af91b1f95048 service nova] Acquiring lock "refresh_cache-940e0328-970d-4f49-a102-d8a00b8c299b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.220189] env[68285]: DEBUG oslo_concurrency.lockutils [req-97fb0d59-c3ae-4922-9acf-8d633d44fda0 req-d7ab2c3e-5d1d-4072-a50d-af91b1f95048 service nova] Acquired lock "refresh_cache-940e0328-970d-4f49-a102-d8a00b8c299b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.220348] env[68285]: DEBUG nova.network.neutron [req-97fb0d59-c3ae-4922-9acf-8d633d44fda0 req-d7ab2c3e-5d1d-4072-a50d-af91b1f95048 service nova] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Refreshing network info cache for port 2aa2446c-f5b4-4511-9c9f-9bd2e76047a8 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 905.486708] env[68285]: DEBUG nova.network.neutron [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Successfully created port: 0a2792b7-2c8a-4e2e-b434-f8073b771b2d {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 905.553107] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891423, 'name': CreateVM_Task, 'duration_secs': 0.55639} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.553275] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 905.556827] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.556827] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.556827] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 905.556827] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3874307f-0187-4723-ad8e-14642a82f8cf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.560323] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Waiting for the task: (returnval){ [ 905.560323] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b7ea68-cf48-d2cd-b83b-79f74146162c" [ 905.560323] env[68285]: _type = "Task" [ 905.560323] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.571405] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b7ea68-cf48-d2cd-b83b-79f74146162c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.594599] env[68285]: DEBUG nova.compute.manager [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 905.617694] env[68285]: DEBUG nova.network.neutron [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Successfully updated port: 567381f7-5f78-4920-beb9-db0ef3479244 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 905.672493] env[68285]: DEBUG oslo_vmware.api [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Task: {'id': task-2891426, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184174} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.672927] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 905.673200] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 905.673523] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 905.673647] env[68285]: INFO nova.compute.manager [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Took 1.12 seconds to destroy the instance on the hypervisor. [ 905.673930] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 905.677329] env[68285]: DEBUG nova.compute.manager [-] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 905.677491] env[68285]: DEBUG nova.network.neutron [-] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 906.046765] env[68285]: DEBUG nova.network.neutron [-] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.078511] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b7ea68-cf48-d2cd-b83b-79f74146162c, 'name': SearchDatastore_Task, 'duration_secs': 0.009695} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.078892] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.079152] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 906.079431] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.079848] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.079848] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 906.080606] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42570587-b0f8-45a5-82bc-190377414da8 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.088622] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 906.088738] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 906.089700] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38a2769c-41e1-4615-aab0-335d118079fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.097071] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Waiting for the task: (returnval){ [ 906.097071] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527e3d3f-9ce9-912c-45bb-1ea1f4dabdf6" [ 906.097071] env[68285]: _type = "Task" [ 906.097071] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.109956] env[68285]: DEBUG oslo_concurrency.lockutils [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Acquiring lock "5b58896c-cb07-48c8-ace0-385486a3e19d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.110342] env[68285]: DEBUG oslo_concurrency.lockutils [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Lock "5b58896c-cb07-48c8-ace0-385486a3e19d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.110632] env[68285]: DEBUG oslo_concurrency.lockutils [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Acquiring lock "5b58896c-cb07-48c8-ace0-385486a3e19d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.110898] env[68285]: DEBUG oslo_concurrency.lockutils [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Lock "5b58896c-cb07-48c8-ace0-385486a3e19d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.111176] env[68285]: DEBUG oslo_concurrency.lockutils [None 
req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Lock "5b58896c-cb07-48c8-ace0-385486a3e19d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.118950] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527e3d3f-9ce9-912c-45bb-1ea1f4dabdf6, 'name': SearchDatastore_Task, 'duration_secs': 0.009212} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.119554] env[68285]: INFO nova.compute.manager [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Terminating instance [ 906.122193] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.122415] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.122977] env[68285]: DEBUG nova.network.neutron [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 906.126212] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbb0341e-bf6c-47e3-8cfb-add787c77559 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.137175] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Waiting for the task: (returnval){ [ 906.137175] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c702a4-4513-30a3-e174-9be96ed6e81a" [ 906.137175] env[68285]: _type = "Task" [ 906.137175] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.148597] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c702a4-4513-30a3-e174-9be96ed6e81a, 'name': SearchDatastore_Task, 'duration_secs': 0.008749} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.153329] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.154016] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 940e0328-970d-4f49-a102-d8a00b8c299b/940e0328-970d-4f49-a102-d8a00b8c299b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 906.155605] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c916c42d-61fc-4933-8e59-80a6e7d2b955 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.165378] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Waiting for the task: (returnval){ [ 906.165378] env[68285]: value = "task-2891427" [ 906.165378] env[68285]: _type = "Task" [ 906.165378] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.178237] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891427, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.212026] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0712d2-2dad-44f2-a9f7-1bfee065c422 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.217991] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9696f489-3e21-439c-86cf-b549fe9f4525 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.249444] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e523d0ec-29c1-493b-8484-d72d4fa43e60 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.260013] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ee4d43-6eac-42a9-bb9a-231bbb9aea10 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.274433] env[68285]: DEBUG nova.compute.provider_tree [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.556416] env[68285]: INFO nova.compute.manager [-] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Took 2.34 seconds to deallocate network for instance. [ 906.587984] env[68285]: DEBUG nova.network.neutron [req-97fb0d59-c3ae-4922-9acf-8d633d44fda0 req-d7ab2c3e-5d1d-4072-a50d-af91b1f95048 service nova] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Updated VIF entry in instance network info cache for port 2aa2446c-f5b4-4511-9c9f-9bd2e76047a8. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 906.587984] env[68285]: DEBUG nova.network.neutron [req-97fb0d59-c3ae-4922-9acf-8d633d44fda0 req-d7ab2c3e-5d1d-4072-a50d-af91b1f95048 service nova] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Updating instance_info_cache with network_info: [{"id": "2aa2446c-f5b4-4511-9c9f-9bd2e76047a8", "address": "fa:16:3e:d2:68:80", "network": {"id": "fab946b6-8ac4-446d-9fa8-3051a38ddf60", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-584547690-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd7990c4a58b4c0eb1b5a6474baefbc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2aa2446c-f5", "ovs_interfaceid": "2aa2446c-f5b4-4511-9c9f-9bd2e76047a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.614338] env[68285]: DEBUG nova.compute.manager [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 906.634104] env[68285]: DEBUG nova.compute.manager [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 906.634315] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 906.635874] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02234d07-f07e-4d47-9743-f3ffae6aaf26 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.644280] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 906.644280] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b1b82080-ada8-4f8c-918b-d97fa427d706 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.649922] env[68285]: DEBUG nova.virt.hardware [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 906.650167] env[68285]: DEBUG nova.virt.hardware [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 906.650335] env[68285]: DEBUG nova.virt.hardware [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 906.650495] env[68285]: DEBUG nova.virt.hardware [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 906.651027] env[68285]: DEBUG nova.virt.hardware [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 906.651027] env[68285]: DEBUG nova.virt.hardware [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 906.651027] env[68285]: DEBUG nova.virt.hardware [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 906.651260] env[68285]: DEBUG nova.virt.hardware [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 906.651328] env[68285]: DEBUG nova.virt.hardware [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 906.651447] env[68285]: DEBUG nova.virt.hardware [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 906.652315] env[68285]: DEBUG nova.virt.hardware [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 906.652685] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3208bd9-2ac1-4fba-a983-2c007667fca3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.656932] env[68285]: DEBUG oslo_vmware.api [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Waiting for the task: (returnval){ [ 906.656932] env[68285]: value = "task-2891428" [ 906.656932] env[68285]: _type = "Task" [ 906.656932] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.666127] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f5364e-2454-4b6f-9876-737d95856a29 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.676717] env[68285]: DEBUG oslo_vmware.api [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891428, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.693778] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891427, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.441116} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.694047] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 940e0328-970d-4f49-a102-d8a00b8c299b/940e0328-970d-4f49-a102-d8a00b8c299b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 906.694336] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 906.694498] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4531a7e-92fe-4a93-956f-d9f8e02b9a1f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.700518] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Waiting for the task: (returnval){ [ 906.700518] env[68285]: value = "task-2891429" [ 906.700518] env[68285]: _type = "Task" [ 906.700518] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.708452] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891429, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.709240] env[68285]: DEBUG nova.network.neutron [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 906.778309] env[68285]: DEBUG nova.scheduler.client.report [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 906.997209] env[68285]: DEBUG nova.network.neutron [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Successfully updated port: 0a2792b7-2c8a-4e2e-b434-f8073b771b2d {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 907.066130] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.092374] env[68285]: DEBUG oslo_concurrency.lockutils [req-97fb0d59-c3ae-4922-9acf-8d633d44fda0 req-d7ab2c3e-5d1d-4072-a50d-af91b1f95048 service nova] Releasing lock "refresh_cache-940e0328-970d-4f49-a102-d8a00b8c299b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.106038] env[68285]: DEBUG nova.network.neutron [-] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.169080] env[68285]: DEBUG oslo_vmware.api [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891428, 'name': PowerOffVM_Task, 'duration_secs': 0.267799} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.169442] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.169603] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.169888] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a39fc1a6-2007-4d89-9fd3-80e611d01563 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.210372] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891429, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131918} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.211445] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 907.212275] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2826c51f-2c60-4d1d-b9f5-e157c383b70f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.242741] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 940e0328-970d-4f49-a102-d8a00b8c299b/940e0328-970d-4f49-a102-d8a00b8c299b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 907.243092] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1a66cc6-7125-4d53-99df-15c9613154fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.261116] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 907.261733] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 
5b58896c-cb07-48c8-ace0-385486a3e19d] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 907.261733] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Deleting the datastore file [datastore1] 5b58896c-cb07-48c8-ace0-385486a3e19d {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.261863] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f2055126-df13-464f-a057-17b45497edbd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.265898] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Waiting for the task: (returnval){ [ 907.265898] env[68285]: value = "task-2891431" [ 907.265898] env[68285]: _type = "Task" [ 907.265898] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.270955] env[68285]: DEBUG oslo_vmware.api [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Waiting for the task: (returnval){ [ 907.270955] env[68285]: value = "task-2891432" [ 907.270955] env[68285]: _type = "Task" [ 907.270955] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.278738] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891431, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.281690] env[68285]: DEBUG oslo_vmware.api [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891432, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.283510] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.701s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.283937] env[68285]: DEBUG nova.compute.manager [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 907.286654] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.424s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.291492] env[68285]: INFO nova.compute.claims [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 907.320581] env[68285]: DEBUG nova.network.neutron [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance_info_cache with network_info: [{"id": "567381f7-5f78-4920-beb9-db0ef3479244", "address": "fa:16:3e:68:fd:5e", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap567381f7-5f", "ovs_interfaceid": "567381f7-5f78-4920-beb9-db0ef3479244", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.507861] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "refresh_cache-5266817c-ce3b-4c96-a3bd-32b631c29b81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.507861] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "refresh_cache-5266817c-ce3b-4c96-a3bd-32b631c29b81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.507861] env[68285]: DEBUG nova.network.neutron [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 907.609252] env[68285]: INFO nova.compute.manager [-] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Took 1.93 seconds to 
deallocate network for instance. [ 907.728250] env[68285]: DEBUG nova.compute.manager [req-793d94e6-9913-4e05-a7b0-205744124c97 req-11dacd2e-2401-49cc-82d7-257dd6a01633 service nova] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Received event network-vif-plugged-0a2792b7-2c8a-4e2e-b434-f8073b771b2d {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 907.728476] env[68285]: DEBUG oslo_concurrency.lockutils [req-793d94e6-9913-4e05-a7b0-205744124c97 req-11dacd2e-2401-49cc-82d7-257dd6a01633 service nova] Acquiring lock "5266817c-ce3b-4c96-a3bd-32b631c29b81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.728682] env[68285]: DEBUG oslo_concurrency.lockutils [req-793d94e6-9913-4e05-a7b0-205744124c97 req-11dacd2e-2401-49cc-82d7-257dd6a01633 service nova] Lock "5266817c-ce3b-4c96-a3bd-32b631c29b81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.728847] env[68285]: DEBUG oslo_concurrency.lockutils [req-793d94e6-9913-4e05-a7b0-205744124c97 req-11dacd2e-2401-49cc-82d7-257dd6a01633 service nova] Lock "5266817c-ce3b-4c96-a3bd-32b631c29b81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.729018] env[68285]: DEBUG nova.compute.manager [req-793d94e6-9913-4e05-a7b0-205744124c97 req-11dacd2e-2401-49cc-82d7-257dd6a01633 service nova] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] No waiting events found dispatching network-vif-plugged-0a2792b7-2c8a-4e2e-b434-f8073b771b2d {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 907.729184] env[68285]: WARNING nova.compute.manager [req-793d94e6-9913-4e05-a7b0-205744124c97 req-11dacd2e-2401-49cc-82d7-257dd6a01633 service nova] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Received unexpected event network-vif-plugged-0a2792b7-2c8a-4e2e-b434-f8073b771b2d for instance with vm_state building and task_state spawning. [ 907.780504] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891431, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.785802] env[68285]: DEBUG oslo_vmware.api [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Task: {'id': task-2891432, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169899} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.786071] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.786425] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 907.786425] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 907.786694] env[68285]: INFO nova.compute.manager [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 907.787637] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 907.787637] env[68285]: DEBUG nova.compute.manager [-] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 907.787637] env[68285]: DEBUG nova.network.neutron [-] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 907.797472] env[68285]: DEBUG nova.compute.utils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 907.800228] env[68285]: DEBUG nova.compute.manager [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 907.800228] env[68285]: DEBUG nova.network.neutron [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 907.823423] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Releasing lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.823755] env[68285]: DEBUG nova.compute.manager [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Instance network_info: |[{"id": "567381f7-5f78-4920-beb9-db0ef3479244", "address": "fa:16:3e:68:fd:5e", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap567381f7-5f", "ovs_interfaceid": "567381f7-5f78-4920-beb9-db0ef3479244", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 907.824187] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:fd:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '567381f7-5f78-4920-beb9-db0ef3479244', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 907.831802] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 907.832376] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 907.832611] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d8d0872-075c-45ef-93c0-940835152760 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.848533] env[68285]: DEBUG nova.policy [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64faebf5ce1549fe938f12248656d8d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2727048b316143c7bfa2aef4f9b264f2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 907.856038] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 907.856038] env[68285]: value = "task-2891433" [ 907.856038] env[68285]: _type = "Task" [ 907.856038] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.873442] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891433, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.996582] env[68285]: DEBUG nova.compute.manager [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Received event network-vif-plugged-567381f7-5f78-4920-beb9-db0ef3479244 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 907.996802] env[68285]: DEBUG oslo_concurrency.lockutils [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] Acquiring lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.997021] env[68285]: DEBUG oslo_concurrency.lockutils [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] Lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.997193] env[68285]: DEBUG oslo_concurrency.lockutils [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] Lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.997361] env[68285]: DEBUG nova.compute.manager [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 
req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] No waiting events found dispatching network-vif-plugged-567381f7-5f78-4920-beb9-db0ef3479244 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 907.997557] env[68285]: WARNING nova.compute.manager [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Received unexpected event network-vif-plugged-567381f7-5f78-4920-beb9-db0ef3479244 for instance with vm_state building and task_state spawning. [ 907.998416] env[68285]: DEBUG nova.compute.manager [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Received event network-vif-deleted-ccc388b0-7423-4892-ac70-e4d86b1a0f17 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 907.998416] env[68285]: DEBUG nova.compute.manager [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Received event network-changed-567381f7-5f78-4920-beb9-db0ef3479244 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 907.998416] env[68285]: DEBUG nova.compute.manager [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Refreshing instance network info cache due to event network-changed-567381f7-5f78-4920-beb9-db0ef3479244. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 907.998416] env[68285]: DEBUG oslo_concurrency.lockutils [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] Acquiring lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.998416] env[68285]: DEBUG oslo_concurrency.lockutils [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] Acquired lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.998670] env[68285]: DEBUG nova.network.neutron [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Refreshing network info cache for port 567381f7-5f78-4920-beb9-db0ef3479244 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 908.119743] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.138031] env[68285]: DEBUG nova.network.neutron [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 908.157087] env[68285]: DEBUG nova.network.neutron [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Successfully created port: 001d3974-db8e-494c-b536-d7415394a0e5 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 908.282616] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891431, 'name': ReconfigVM_Task, 'duration_secs': 0.560341} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.282922] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 940e0328-970d-4f49-a102-d8a00b8c299b/940e0328-970d-4f49-a102-d8a00b8c299b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 908.286901] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-534a47bb-c8f9-422b-8bf5-f8a60f8e56fa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.294063] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Waiting for the task: (returnval){ [ 908.294063] env[68285]: value = "task-2891434" [ 908.294063] env[68285]: _type = "Task" [ 908.294063] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.308477] env[68285]: DEBUG nova.compute.manager [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 908.316370] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891434, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.379262] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891433, 'name': CreateVM_Task, 'duration_secs': 0.313953} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.382288] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 908.382908] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.383082] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.383383] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 908.383893] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecdf6e41-9891-46f5-bcf6-51ab1947519f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.389053] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 908.389053] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d9d75c-ee44-6480-a788-2e1ec4a1c659" [ 908.389053] env[68285]: _type = "Task" [ 908.389053] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.402045] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d9d75c-ee44-6480-a788-2e1ec4a1c659, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.649617] env[68285]: DEBUG nova.network.neutron [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Updating instance_info_cache with network_info: [{"id": "0a2792b7-2c8a-4e2e-b434-f8073b771b2d", "address": "fa:16:3e:2c:5b:5e", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a2792b7-2c", "ovs_interfaceid": "0a2792b7-2c8a-4e2e-b434-f8073b771b2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.733126] env[68285]: DEBUG nova.network.neutron [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updated VIF entry in instance network info cache for port 567381f7-5f78-4920-beb9-db0ef3479244. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 908.733455] env[68285]: DEBUG nova.network.neutron [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance_info_cache with network_info: [{"id": "567381f7-5f78-4920-beb9-db0ef3479244", "address": "fa:16:3e:68:fd:5e", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap567381f7-5f", "ovs_interfaceid": "567381f7-5f78-4920-beb9-db0ef3479244", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.810540] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891434, 'name': Rename_Task, 'duration_secs': 0.379016} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.812421] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 908.812421] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e926f63-9c09-4235-b8c0-36815ef3b226 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.824409] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Waiting for the task: (returnval){ [ 908.824409] env[68285]: value = "task-2891435" [ 908.824409] env[68285]: _type = "Task" [ 908.824409] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.835250] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891435, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.900107] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d9d75c-ee44-6480-a788-2e1ec4a1c659, 'name': SearchDatastore_Task, 'duration_secs': 0.228934} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.900453] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.900749] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 908.901029] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.901214] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.901431] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 908.901727] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9fc5861e-88ec-48ad-9fa1-f11e4cacc7d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.908531] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956219ca-b91f-4973-bc52-39b35a5d5436 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.913197] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 908.913197] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c 
tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 908.914213] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfbf822d-9346-46aa-a767-96ac7f71a692 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.919918] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d4fd0f-12da-49fe-8509-8029a654220b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.926019] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 908.926019] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5216eda7-e3e9-110a-9eec-01897add3fbc" [ 908.926019] env[68285]: _type = "Task" [ 908.926019] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.956634] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15599e6d-d67e-4198-bd1c-08b0cee25eff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.965557] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5216eda7-e3e9-110a-9eec-01897add3fbc, 'name': SearchDatastore_Task, 'duration_secs': 0.009107} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.967009] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a02139d-dacd-4127-a84e-ec706b5264d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.975684] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa4ef6c-e155-44ab-9d60-3cba925ce902 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.982058] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 908.982058] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52199468-bcee-24f2-3b99-e497f8d8b107" [ 908.982058] env[68285]: _type = "Task" [ 908.982058] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.995899] env[68285]: DEBUG nova.compute.provider_tree [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 909.006105] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52199468-bcee-24f2-3b99-e497f8d8b107, 'name': SearchDatastore_Task, 'duration_secs': 0.012908} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.006388] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.006689] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] b3b7f551-81aa-4ac4-9906-020fac5f01f7/b3b7f551-81aa-4ac4-9906-020fac5f01f7.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 909.006934] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e376e87d-7c43-4211-9660-de4b6cbdb990 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.014744] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 909.014744] env[68285]: value = "task-2891436" [ 909.014744] env[68285]: _type = "Task" [ 909.014744] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.023249] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891436, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.270518] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "3e656d8d-bd06-4886-9424-4ed76b98aae9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.270518] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "3e656d8d-bd06-4886-9424-4ed76b98aae9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.270518] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "3e656d8d-bd06-4886-9424-4ed76b98aae9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.270518] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "3e656d8d-bd06-4886-9424-4ed76b98aae9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.270518] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "3e656d8d-bd06-4886-9424-4ed76b98aae9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.270518] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "refresh_cache-5266817c-ce3b-4c96-a3bd-32b631c29b81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.270518] env[68285]: DEBUG nova.compute.manager [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Instance network_info: |[{"id": "0a2792b7-2c8a-4e2e-b434-f8073b771b2d", "address": "fa:16:3e:2c:5b:5e", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a2792b7-2c", "ovs_interfaceid": "0a2792b7-2c8a-4e2e-b434-f8073b771b2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 909.270518] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:5b:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '209639b9-c313-4b35-86dc-dccd744d174a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0a2792b7-2c8a-4e2e-b434-f8073b771b2d', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 909.270518] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Creating folder: Project (07b5865cc5804d8d98073e5d0c1449aa). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 909.270518] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6002ab16-ad23-4da6-ac34-3960da8a69ed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.270518] env[68285]: DEBUG oslo_concurrency.lockutils [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] Releasing lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.271753] env[68285]: DEBUG nova.compute.manager [req-ed8c36a8-f0af-4c76-a172-a110eee9e5f2 req-1834e554-a0f2-4a6e-837f-ba137b99b585 service nova] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Received event network-vif-deleted-0a48ba66-2ec4-4dfe-94f1-ea0a9f494005 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 909.271753] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Created folder: Project (07b5865cc5804d8d98073e5d0c1449aa) in parent group-v580775. [ 909.271753] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Creating folder: Instances. Parent ref: group-v580895. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 909.275022] env[68285]: INFO nova.compute.manager [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Terminating instance [ 909.275022] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e1074f9-d495-4d6b-8aa5-28f821333339 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.286844] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Created folder: Instances in parent group-v580895. [ 909.287096] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 909.287351] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 909.287601] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76564cb7-84be-4d94-a3a8-7cc71da50df9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.310902] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 909.310902] env[68285]: value = "task-2891439" [ 909.310902] env[68285]: _type = "Task" [ 909.310902] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.322800] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891439, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.322800] env[68285]: DEBUG nova.compute.manager [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 909.345701] env[68285]: DEBUG oslo_vmware.api [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891435, 'name': PowerOnVM_Task, 'duration_secs': 0.502359} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.345701] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 909.345701] env[68285]: INFO nova.compute.manager [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Took 8.22 seconds to spawn the instance on the hypervisor. [ 909.345701] env[68285]: DEBUG nova.compute.manager [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 909.345946] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2520d6c-517c-4ef5-8e99-81a4f73a4be3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.371308] env[68285]: DEBUG nova.virt.hardware [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 909.371636] env[68285]: DEBUG nova.virt.hardware [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 909.371838] env[68285]: DEBUG nova.virt.hardware [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 909.372378] env[68285]: DEBUG nova.virt.hardware [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 909.372596] env[68285]: DEBUG nova.virt.hardware [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 
tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 909.372833] env[68285]: DEBUG nova.virt.hardware [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 909.372978] env[68285]: DEBUG nova.virt.hardware [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 909.373186] env[68285]: DEBUG nova.virt.hardware [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 909.373365] env[68285]: DEBUG nova.virt.hardware [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 909.373539] env[68285]: DEBUG nova.virt.hardware [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 909.373955] env[68285]: DEBUG nova.virt.hardware [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 909.375281] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4b886f-0eea-46f5-8fbc-e55a3c18fdb6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.393492] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a57e630-bbe8-4cca-8097-f3e75e53a41c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.527681] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891436, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.532043] env[68285]: ERROR nova.scheduler.client.report [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [req-561a4ed2-ee4a-4a3b-9245-2a99bc1f39e4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-561a4ed2-ee4a-4a3b-9245-2a99bc1f39e4"}]} [ 909.552085] env[68285]: DEBUG nova.scheduler.client.report [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 909.573406] env[68285]: DEBUG nova.scheduler.client.report [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 909.573645] env[68285]: DEBUG nova.compute.provider_tree [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 909.593434] env[68285]: DEBUG nova.scheduler.client.report [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 909.614807] env[68285]: DEBUG nova.scheduler.client.report [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] 
Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 909.759309] env[68285]: DEBUG nova.network.neutron [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Successfully updated port: 001d3974-db8e-494c-b536-d7415394a0e5 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 909.773981] env[68285]: DEBUG nova.network.neutron [-] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.782222] env[68285]: DEBUG nova.compute.manager [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 909.782538] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 909.783931] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26359ac8-d05e-4e6d-a428-f78f78684269 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.798752] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.799769] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04e788f8-0e4d-4939-aeba-6212dcc21209 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.809404] env[68285]: DEBUG oslo_vmware.api [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 909.809404] env[68285]: value = "task-2891440" [ 909.809404] env[68285]: _type = "Task" [ 909.809404] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.831719] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891439, 'name': CreateVM_Task, 'duration_secs': 0.381847} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.832092] env[68285]: DEBUG oslo_vmware.api [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891440, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.832197] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 909.833112] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.833355] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.833787] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 909.834394] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18d93e20-9bf6-4a02-936f-a0551e30d43c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.844153] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 909.844153] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f1a1b0-94d6-0349-9958-9ac4eb29c5d6" [ 909.844153] env[68285]: _type = "Task" [ 909.844153] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.856605] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f1a1b0-94d6-0349-9958-9ac4eb29c5d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.869713] env[68285]: INFO nova.compute.manager [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Took 50.02 seconds to build instance. 
[ 909.908183] env[68285]: DEBUG nova.compute.manager [req-dc2715ee-0d12-45fc-9404-4e98de08922d req-7e1383b5-af36-487a-8f58-7ea6788a0cf2 service nova] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Received event network-changed-0a2792b7-2c8a-4e2e-b434-f8073b771b2d {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 909.908394] env[68285]: DEBUG nova.compute.manager [req-dc2715ee-0d12-45fc-9404-4e98de08922d req-7e1383b5-af36-487a-8f58-7ea6788a0cf2 service nova] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Refreshing instance network info cache due to event network-changed-0a2792b7-2c8a-4e2e-b434-f8073b771b2d. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 909.908655] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc2715ee-0d12-45fc-9404-4e98de08922d req-7e1383b5-af36-487a-8f58-7ea6788a0cf2 service nova] Acquiring lock "refresh_cache-5266817c-ce3b-4c96-a3bd-32b631c29b81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.908780] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc2715ee-0d12-45fc-9404-4e98de08922d req-7e1383b5-af36-487a-8f58-7ea6788a0cf2 service nova] Acquired lock "refresh_cache-5266817c-ce3b-4c96-a3bd-32b631c29b81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.908943] env[68285]: DEBUG nova.network.neutron [req-dc2715ee-0d12-45fc-9404-4e98de08922d req-7e1383b5-af36-487a-8f58-7ea6788a0cf2 service nova] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Refreshing network info cache for port 0a2792b7-2c8a-4e2e-b434-f8073b771b2d {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 910.027631] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891436, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530845} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.027904] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] b3b7f551-81aa-4ac4-9906-020fac5f01f7/b3b7f551-81aa-4ac4-9906-020fac5f01f7.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 910.028146] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 910.028382] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-590ceb85-5174-4f13-acaf-6ee592156c69 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.034620] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 910.034620] env[68285]: value = "task-2891441" [ 910.034620] env[68285]: _type = "Task" [ 910.034620] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.044662] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891441, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.218715] env[68285]: DEBUG nova.compute.manager [req-cc2de37c-ab95-4840-8c1e-6de886194b98 req-85daa054-f3f6-47ea-a850-56d61e0bd63f service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Received event network-changed-f41109eb-9884-4723-8695-fdaae26703db {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 910.218715] env[68285]: DEBUG nova.compute.manager [req-cc2de37c-ab95-4840-8c1e-6de886194b98 req-85daa054-f3f6-47ea-a850-56d61e0bd63f service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Refreshing instance network info cache due to event network-changed-f41109eb-9884-4723-8695-fdaae26703db. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 910.218940] env[68285]: DEBUG oslo_concurrency.lockutils [req-cc2de37c-ab95-4840-8c1e-6de886194b98 req-85daa054-f3f6-47ea-a850-56d61e0bd63f service nova] Acquiring lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.219053] env[68285]: DEBUG oslo_concurrency.lockutils [req-cc2de37c-ab95-4840-8c1e-6de886194b98 req-85daa054-f3f6-47ea-a850-56d61e0bd63f service nova] Acquired lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.219212] env[68285]: DEBUG nova.network.neutron [req-cc2de37c-ab95-4840-8c1e-6de886194b98 req-85daa054-f3f6-47ea-a850-56d61e0bd63f service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Refreshing network info cache for port f41109eb-9884-4723-8695-fdaae26703db {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 910.224563] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7fe302-e70d-47a9-a7cb-205484dedbaf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.232985] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6af8d7d-ee63-4fee-8df2-18594fae6c51 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.267737] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "refresh_cache-d1b5abfa-fd38-4d17-b75f-5036af841d24" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.267833] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "refresh_cache-d1b5abfa-fd38-4d17-b75f-5036af841d24" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.267939] env[68285]: DEBUG nova.network.neutron [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 910.272845] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10e2fab-98c5-4170-8340-fce2a6ac8382 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.280320] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a000735a-1616-4ebc-883c-4fae575844e2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.285127] env[68285]: INFO nova.compute.manager [-] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Took 2.50 seconds to deallocate network for instance. 
[ 910.298823] env[68285]: DEBUG nova.compute.provider_tree [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.321678] env[68285]: DEBUG oslo_vmware.api [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891440, 'name': PowerOffVM_Task, 'duration_secs': 0.241246} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.321970] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 910.322156] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 910.322410] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3dec285-8fed-481f-ac50-4f9c49d8e1c9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.355198] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f1a1b0-94d6-0349-9958-9ac4eb29c5d6, 'name': SearchDatastore_Task, 'duration_secs': 0.010872} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.355570] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.355807] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 910.356106] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.356265] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.356450] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 910.356786] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1435bd2-0073-427f-93d7-de77d14dead7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.371172] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45566bf5-99d1-473a-9731-3458c28e2fd2 tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Lock "940e0328-970d-4f49-a102-d8a00b8c299b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.501s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.371635] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 910.371635] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 910.373300] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0647fc50-8455-4954-8482-a9bfba203e86 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.379172] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 910.379172] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e6affb-4d40-4092-4ff0-aca1a932338d" [ 910.379172] env[68285]: _type = "Task" [ 910.379172] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.386898] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e6affb-4d40-4092-4ff0-aca1a932338d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.392892] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 910.393121] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 910.393305] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Deleting the datastore file [datastore2] 3e656d8d-bd06-4886-9424-4ed76b98aae9 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 910.393558] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4189b79-dc51-447e-9afd-56b73bcdecb5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.401063] env[68285]: DEBUG oslo_vmware.api [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 910.401063] env[68285]: value = "task-2891443" [ 910.401063] env[68285]: _type = "Task" [ 910.401063] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.417272] env[68285]: DEBUG oslo_vmware.api [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891443, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.544485] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891441, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094162} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.546918] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 910.547789] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4bf45d-3e88-4be2-83f3-19274ce5ea15 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.571952] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] b3b7f551-81aa-4ac4-9906-020fac5f01f7/b3b7f551-81aa-4ac4-9906-020fac5f01f7.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 910.572270] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83d95a56-80b6-48cd-9f41-bf12aba0b426 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.593331] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 910.593331] env[68285]: value = "task-2891444" [ 910.593331] env[68285]: _type = "Task" [ 910.593331] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.601741] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891444, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.654617] env[68285]: DEBUG nova.network.neutron [req-dc2715ee-0d12-45fc-9404-4e98de08922d req-7e1383b5-af36-487a-8f58-7ea6788a0cf2 service nova] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Updated VIF entry in instance network info cache for port 0a2792b7-2c8a-4e2e-b434-f8073b771b2d. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 910.654978] env[68285]: DEBUG nova.network.neutron [req-dc2715ee-0d12-45fc-9404-4e98de08922d req-7e1383b5-af36-487a-8f58-7ea6788a0cf2 service nova] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Updating instance_info_cache with network_info: [{"id": "0a2792b7-2c8a-4e2e-b434-f8073b771b2d", "address": "fa:16:3e:2c:5b:5e", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a2792b7-2c", "ovs_interfaceid": "0a2792b7-2c8a-4e2e-b434-f8073b771b2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.791574] env[68285]: DEBUG oslo_concurrency.lockutils [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.801939] env[68285]: DEBUG nova.scheduler.client.report [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 910.807579] env[68285]: DEBUG nova.network.neutron [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 910.878133] env[68285]: DEBUG nova.compute.manager [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 910.893857] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e6affb-4d40-4092-4ff0-aca1a932338d, 'name': SearchDatastore_Task, 'duration_secs': 0.045794} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.894659] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da1cb88a-9ec7-4a60-81a4-23ecc99fb441 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.901743] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 910.901743] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5256a082-f3cd-2386-c990-4430ef15034e" [ 910.901743] env[68285]: _type = "Task" [ 910.901743] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.919604] env[68285]: DEBUG oslo_vmware.api [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891443, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.925651] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5256a082-f3cd-2386-c990-4430ef15034e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.961902] env[68285]: DEBUG nova.network.neutron [req-cc2de37c-ab95-4840-8c1e-6de886194b98 req-85daa054-f3f6-47ea-a850-56d61e0bd63f service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Updated VIF entry in instance network info cache for port f41109eb-9884-4723-8695-fdaae26703db. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 910.962393] env[68285]: DEBUG nova.network.neutron [req-cc2de37c-ab95-4840-8c1e-6de886194b98 req-85daa054-f3f6-47ea-a850-56d61e0bd63f service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Updating instance_info_cache with network_info: [{"id": "f41109eb-9884-4723-8695-fdaae26703db", "address": "fa:16:3e:d2:d7:99", "network": {"id": "24216eb9-67ca-4587-9dce-0239c567b87e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-463162314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0df6f9cd11e4cbea0a5d25e546ade05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf41109eb-98", "ovs_interfaceid": "f41109eb-9884-4723-8695-fdaae26703db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.056741] env[68285]: DEBUG nova.network.neutron [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Updating instance_info_cache with network_info: [{"id": "001d3974-db8e-494c-b536-d7415394a0e5", "address": "fa:16:3e:ae:9a:77", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap001d3974-db", "ovs_interfaceid": "001d3974-db8e-494c-b536-d7415394a0e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.107133] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891444, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.157684] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc2715ee-0d12-45fc-9404-4e98de08922d req-7e1383b5-af36-487a-8f58-7ea6788a0cf2 service nova] Releasing lock "refresh_cache-5266817c-ce3b-4c96-a3bd-32b631c29b81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.157960] env[68285]: DEBUG nova.compute.manager [req-dc2715ee-0d12-45fc-9404-4e98de08922d req-7e1383b5-af36-487a-8f58-7ea6788a0cf2 service nova] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Received event network-vif-plugged-001d3974-db8e-494c-b536-d7415394a0e5 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 911.158171] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc2715ee-0d12-45fc-9404-4e98de08922d req-7e1383b5-af36-487a-8f58-7ea6788a0cf2 service nova] Acquiring lock "d1b5abfa-fd38-4d17-b75f-5036af841d24-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.158371] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc2715ee-0d12-45fc-9404-4e98de08922d req-7e1383b5-af36-487a-8f58-7ea6788a0cf2 service nova] Lock "d1b5abfa-fd38-4d17-b75f-5036af841d24-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.158980] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc2715ee-0d12-45fc-9404-4e98de08922d req-7e1383b5-af36-487a-8f58-7ea6788a0cf2 service nova] Lock "d1b5abfa-fd38-4d17-b75f-5036af841d24-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.158980] env[68285]: DEBUG nova.compute.manager [req-dc2715ee-0d12-45fc-9404-4e98de08922d req-7e1383b5-af36-487a-8f58-7ea6788a0cf2 service nova] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] No waiting events found dispatching network-vif-plugged-001d3974-db8e-494c-b536-d7415394a0e5 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 911.158980] env[68285]: WARNING nova.compute.manager [req-dc2715ee-0d12-45fc-9404-4e98de08922d req-7e1383b5-af36-487a-8f58-7ea6788a0cf2 service nova] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Received unexpected event network-vif-plugged-001d3974-db8e-494c-b536-d7415394a0e5 for instance with vm_state building and task_state spawning. [ 911.313597] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.026s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.313908] env[68285]: DEBUG nova.compute.manager [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 911.316205] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.539s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.317664] env[68285]: INFO nova.compute.claims [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 911.415029] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.433176] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5256a082-f3cd-2386-c990-4430ef15034e, 'name': SearchDatastore_Task, 'duration_secs': 0.033449} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.433429] env[68285]: DEBUG oslo_vmware.api [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891443, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.610953} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.433658] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.433914] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 5266817c-ce3b-4c96-a3bd-32b631c29b81/5266817c-ce3b-4c96-a3bd-32b631c29b81.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 911.434251] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 911.434362] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 911.434537] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 911.434707] env[68285]: INFO nova.compute.manager [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Took 1.65 seconds to destroy the instance on the hypervisor. [ 911.434949] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 911.435149] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df5e88eb-4b78-41bd-afac-129504a9f541 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.437407] env[68285]: DEBUG nova.compute.manager [-] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 911.437554] env[68285]: DEBUG nova.network.neutron [-] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 911.444398] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 911.444398] env[68285]: value = "task-2891445" [ 911.444398] env[68285]: _type = "Task" [ 911.444398] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.451716] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891445, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.465771] env[68285]: DEBUG oslo_concurrency.lockutils [req-cc2de37c-ab95-4840-8c1e-6de886194b98 req-85daa054-f3f6-47ea-a850-56d61e0bd63f service nova] Releasing lock "refresh_cache-3e656d8d-bd06-4886-9424-4ed76b98aae9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.465915] env[68285]: DEBUG nova.compute.manager [req-cc2de37c-ab95-4840-8c1e-6de886194b98 req-85daa054-f3f6-47ea-a850-56d61e0bd63f service nova] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Received event network-vif-deleted-d13fdc9f-ab41-435a-8bd4-080dbc090832 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 911.543899] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "refresh_cache-d1b5abfa-fd38-4d17-b75f-5036af841d24" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.544317] env[68285]: DEBUG nova.compute.manager [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Instance network_info: |[{"id": "001d3974-db8e-494c-b536-d7415394a0e5", "address": "fa:16:3e:ae:9a:77", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap001d3974-db", "ovs_interfaceid": "001d3974-db8e-494c-b536-d7415394a0e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 911.545533] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:9a:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '82dbbfe2-640b-433f-a8e9-1566bd40fb34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '001d3974-db8e-494c-b536-d7415394a0e5', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 911.556788] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Creating folder: Project (2727048b316143c7bfa2aef4f9b264f2). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 911.557304] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1eabba3-2e1f-4b20-9336-476336e75fb6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.572023] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Created folder: Project (2727048b316143c7bfa2aef4f9b264f2) in parent group-v580775. [ 911.572023] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Creating folder: Instances. Parent ref: group-v580898. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 911.572023] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d3142de4-9fe5-415d-bf20-6af90fcef9bd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.582298] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Created folder: Instances in parent group-v580898. [ 911.582298] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 911.582298] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 911.582298] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0d4360e-f332-49d6-b76b-c905d78a13bd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.608942] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891444, 'name': ReconfigVM_Task, 'duration_secs': 0.586471} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.610830] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Reconfigured VM instance instance-0000002b to attach disk [datastore1] b3b7f551-81aa-4ac4-9906-020fac5f01f7/b3b7f551-81aa-4ac4-9906-020fac5f01f7.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 911.611166] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 911.611166] env[68285]: value = "task-2891448" [ 911.611166] env[68285]: _type = "Task" [ 911.611166] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.611419] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91293ccc-944b-4e22-bbfd-bae4e3fb7652 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.621829] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891448, 'name': CreateVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.623498] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 911.623498] env[68285]: value = "task-2891449" [ 911.623498] env[68285]: _type = "Task" [ 911.623498] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.822588] env[68285]: DEBUG nova.compute.utils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 911.830801] env[68285]: DEBUG nova.compute.manager [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 911.830989] env[68285]: DEBUG nova.network.neutron [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 911.896293] env[68285]: DEBUG nova.policy [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '70340a4cc3df49ff971f299e439a1581', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98d03ce152e74cec8910b12d34ad8ba6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 911.954587] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891445, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.123686] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891448, 'name': CreateVM_Task, 'duration_secs': 0.430588} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.124473] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 912.128622] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.129082] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.129168] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 912.129408] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-763f4c52-a587-4e38-ae84-c3c6c5130ec7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.136235] env[68285]: DEBUG oslo_vmware.api 
[None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891449, 'name': Rename_Task, 'duration_secs': 0.467125} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.137533] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 912.137938] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 912.137938] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52032880-4cca-7ace-3e20-6b5d348daa34" [ 912.137938] env[68285]: _type = "Task" [ 912.137938] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.138205] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9344e932-eb5f-4616-9ef6-941a8b071dd7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.150928] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52032880-4cca-7ace-3e20-6b5d348daa34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.151051] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 912.151051] env[68285]: value = "task-2891450" [ 912.151051] env[68285]: _type = "Task" [ 912.151051] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.159297] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891450, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.284991] env[68285]: DEBUG nova.network.neutron [-] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.288128] env[68285]: DEBUG nova.network.neutron [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Successfully created port: 7e46bb86-86a9-4e35-8965-1477f6e7b8af {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.320444] env[68285]: DEBUG nova.compute.manager [req-c856de3f-72fe-44e2-b386-34628ed2e250 req-8f942d5c-ed4f-4ac0-b861-600bf46bdc99 service nova] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Received event network-changed-001d3974-db8e-494c-b536-d7415394a0e5 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 912.324290] env[68285]: DEBUG nova.compute.manager [req-c856de3f-72fe-44e2-b386-34628ed2e250 req-8f942d5c-ed4f-4ac0-b861-600bf46bdc99 service nova] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Refreshing instance network info cache due to event network-changed-001d3974-db8e-494c-b536-d7415394a0e5. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 912.324290] env[68285]: DEBUG oslo_concurrency.lockutils [req-c856de3f-72fe-44e2-b386-34628ed2e250 req-8f942d5c-ed4f-4ac0-b861-600bf46bdc99 service nova] Acquiring lock "refresh_cache-d1b5abfa-fd38-4d17-b75f-5036af841d24" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.324290] env[68285]: DEBUG oslo_concurrency.lockutils [req-c856de3f-72fe-44e2-b386-34628ed2e250 req-8f942d5c-ed4f-4ac0-b861-600bf46bdc99 service nova] Acquired lock "refresh_cache-d1b5abfa-fd38-4d17-b75f-5036af841d24" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.324290] env[68285]: DEBUG nova.network.neutron [req-c856de3f-72fe-44e2-b386-34628ed2e250 req-8f942d5c-ed4f-4ac0-b861-600bf46bdc99 service nova] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Refreshing network info cache for port 001d3974-db8e-494c-b536-d7415394a0e5 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 912.331010] env[68285]: DEBUG nova.compute.manager [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 912.460254] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891445, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536466} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.460509] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 5266817c-ce3b-4c96-a3bd-32b631c29b81/5266817c-ce3b-4c96-a3bd-32b631c29b81.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 912.460740] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 912.461086] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a583a89a-60b9-4ce5-aba5-bae579f72361 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.471293] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 912.471293] env[68285]: value = "task-2891451" [ 912.471293] env[68285]: _type = "Task" [ 912.471293] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.483161] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891451, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.654208] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52032880-4cca-7ace-3e20-6b5d348daa34, 'name': SearchDatastore_Task, 'duration_secs': 0.030718} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.660383] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.660629] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 912.660859] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.661009] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.661192] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 912.661736] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5dfb9205-21ec-4137-ba82-da79026e568d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.668736] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891450, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.675884] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 912.676077] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 912.676808] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19839a58-150e-474b-aeb1-57eb6cd9042a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.682090] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 912.682090] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529f4dd1-2302-2b3e-8475-cdea26d3a51b" [ 912.682090] env[68285]: _type = "Task" [ 912.682090] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.692301] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529f4dd1-2302-2b3e-8475-cdea26d3a51b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.793629] env[68285]: INFO nova.compute.manager [-] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Took 1.36 seconds to deallocate network for instance. [ 912.853128] env[68285]: DEBUG nova.compute.manager [req-05161470-693f-4a5f-8610-ed09494020ec req-232b99a8-2843-4fd5-886d-78c9df0503f4 service nova] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Received event network-vif-deleted-f41109eb-9884-4723-8695-fdaae26703db {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 912.916876] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cc5b08-39ed-4fd4-b5f4-828e925c018e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.924940] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af3c78d-9b72-4191-b168-b4b04c296a1f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.960576] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb7ad4c-b979-48bc-ae45-68bf0d76c9cb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.968963] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18debf55-23e5-49b8-9e10-95f4df2ba141 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.979825] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891451, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116308} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.990955] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 912.991519] env[68285]: DEBUG nova.compute.provider_tree [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.993300] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa75a3c-bc57-4581-9953-99c614938372 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.017381] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 5266817c-ce3b-4c96-a3bd-32b631c29b81/5266817c-ce3b-4c96-a3bd-32b631c29b81.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.018391] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5137b5e7-c0a1-41bd-b2b3-49025c28f1bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.042310] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 913.042310] env[68285]: value = "task-2891452" [ 913.042310] env[68285]: _type = "Task" [ 913.042310] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.051482] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891452, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.136224] env[68285]: DEBUG nova.network.neutron [req-c856de3f-72fe-44e2-b386-34628ed2e250 req-8f942d5c-ed4f-4ac0-b861-600bf46bdc99 service nova] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Updated VIF entry in instance network info cache for port 001d3974-db8e-494c-b536-d7415394a0e5. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 913.136657] env[68285]: DEBUG nova.network.neutron [req-c856de3f-72fe-44e2-b386-34628ed2e250 req-8f942d5c-ed4f-4ac0-b861-600bf46bdc99 service nova] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Updating instance_info_cache with network_info: [{"id": "001d3974-db8e-494c-b536-d7415394a0e5", "address": "fa:16:3e:ae:9a:77", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap001d3974-db", "ovs_interfaceid": "001d3974-db8e-494c-b536-d7415394a0e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.165400] env[68285]: DEBUG oslo_vmware.api [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891450, 'name': PowerOnVM_Task, 'duration_secs': 0.958725} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.165400] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 913.165400] env[68285]: INFO nova.compute.manager [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Took 9.34 seconds to spawn the instance on the hypervisor. 
[ 913.165620] env[68285]: DEBUG nova.compute.manager [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 913.166390] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ab4e74-0cd9-4f62-9f2d-014c09de977a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.194096] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529f4dd1-2302-2b3e-8475-cdea26d3a51b, 'name': SearchDatastore_Task, 'duration_secs': 0.016704} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.194096] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e62d745c-366f-4e4c-81ee-2e3e3a4fe05a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.200218] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 913.200218] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5260c57b-aee4-346d-afb5-707c39a5ae9f" [ 913.200218] env[68285]: _type = "Task" [ 913.200218] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.208457] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5260c57b-aee4-346d-afb5-707c39a5ae9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.304074] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.359997] env[68285]: DEBUG nova.compute.manager [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 913.386281] env[68285]: DEBUG nova.virt.hardware [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 913.386539] env[68285]: DEBUG nova.virt.hardware [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 913.386696] env[68285]: DEBUG nova.virt.hardware [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 913.386875] env[68285]: DEBUG nova.virt.hardware [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.387028] env[68285]: DEBUG nova.virt.hardware [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 913.387179] env[68285]: DEBUG nova.virt.hardware [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 913.387387] env[68285]: DEBUG nova.virt.hardware [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 913.387613] env[68285]: DEBUG nova.virt.hardware [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 913.387814] env[68285]: DEBUG nova.virt.hardware [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd 
tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 913.387979] env[68285]: DEBUG nova.virt.hardware [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 913.388164] env[68285]: DEBUG nova.virt.hardware [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 913.389170] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a339b0-1fb9-433a-b8c5-1bc16db6e7c6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.396470] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d64dae-055b-4e35-b217-9c97560ab324 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.497675] env[68285]: DEBUG nova.scheduler.client.report [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.553319] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891452, 'name': ReconfigVM_Task, 'duration_secs': 0.376733} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.554032] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 5266817c-ce3b-4c96-a3bd-32b631c29b81/5266817c-ce3b-4c96-a3bd-32b631c29b81.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 913.554756] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-067e109b-f872-4538-bc2a-5a6115085805 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.562173] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 913.562173] env[68285]: value = "task-2891453" [ 913.562173] env[68285]: _type = "Task" [ 913.562173] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.573107] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891453, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.639820] env[68285]: DEBUG oslo_concurrency.lockutils [req-c856de3f-72fe-44e2-b386-34628ed2e250 req-8f942d5c-ed4f-4ac0-b861-600bf46bdc99 service nova] Releasing lock "refresh_cache-d1b5abfa-fd38-4d17-b75f-5036af841d24" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.688065] env[68285]: INFO nova.compute.manager [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Took 45.87 seconds to build instance. [ 913.710804] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5260c57b-aee4-346d-afb5-707c39a5ae9f, 'name': SearchDatastore_Task, 'duration_secs': 0.010072} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.711075] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.711334] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] d1b5abfa-fd38-4d17-b75f-5036af841d24/d1b5abfa-fd38-4d17-b75f-5036af841d24.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 913.711576] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1099d4d7-989c-4511-a273-0f620388ad48 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.719909] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 913.719909] env[68285]: value = "task-2891454" [ 913.719909] env[68285]: _type = "Task" [ 913.719909] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.727705] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891454, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.980935] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Acquiring lock "940e0328-970d-4f49-a102-d8a00b8c299b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.981327] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Lock "940e0328-970d-4f49-a102-d8a00b8c299b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.981639] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Acquiring lock "940e0328-970d-4f49-a102-d8a00b8c299b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.981890] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Lock "940e0328-970d-4f49-a102-d8a00b8c299b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.982133] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Lock "940e0328-970d-4f49-a102-d8a00b8c299b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.987131] env[68285]: INFO nova.compute.manager [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Terminating instance [ 914.002639] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.003197] env[68285]: DEBUG nova.compute.manager [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 914.009476] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.510s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.009797] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.012090] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.702s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.012317] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.014181] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.523s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.015936] env[68285]: INFO nova.compute.claims [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 914.044505] env[68285]: INFO nova.scheduler.client.report [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Deleted allocations for instance d2c3e3eb-4b05-4e08-bd08-0f42560fcdba [ 914.046798] env[68285]: INFO nova.scheduler.client.report [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Deleted allocations for instance 11de7da5-1d73-4536-b2a1-f7dbbdec14b8 [ 914.073833] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891453, 'name': Rename_Task, 'duration_secs': 0.142977} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.076024] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 914.076024] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b53b4d6-6877-4f3c-a2ac-30cfb148932d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.084971] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 914.084971] env[68285]: value = "task-2891455" [ 914.084971] env[68285]: _type = "Task" [ 914.084971] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.100702] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891455, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.190292] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd9b8caa-5a65-448c-b30d-e80811055e0c tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.843s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.232313] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891454, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.492837] env[68285]: DEBUG nova.compute.manager [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 914.493146] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.493952] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514dddb4-adb0-4e36-9957-6a8ecae47646 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.502529] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.502921] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2dfcdf3-30a5-4680-9f35-cd5a9f1d0906 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.510539] env[68285]: DEBUG oslo_vmware.api [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Waiting for the task: (returnval){ [ 914.510539] env[68285]: value = "task-2891456" [ 914.510539] env[68285]: _type = "Task" [ 914.510539] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.522093] env[68285]: DEBUG nova.compute.utils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 914.525256] env[68285]: DEBUG oslo_vmware.api [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891456, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.525592] env[68285]: DEBUG nova.compute.manager [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 914.525793] env[68285]: DEBUG nova.network.neutron [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 914.538414] env[68285]: DEBUG nova.network.neutron [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Successfully updated port: 7e46bb86-86a9-4e35-8965-1477f6e7b8af {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 914.554756] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ac650b42-33f6-40d7-a494-5ddf6638bf78 tempest-ServerExternalEventsTest-804730874 tempest-ServerExternalEventsTest-804730874-project-member] Lock "d2c3e3eb-4b05-4e08-bd08-0f42560fcdba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.790s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.557466] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9f42f33-ba51-4c43-b1b9-d4c0d647eb30 tempest-FloatingIPsAssociationTestJSON-216905033 tempest-FloatingIPsAssociationTestJSON-216905033-project-member] Lock "11de7da5-1d73-4536-b2a1-f7dbbdec14b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.658s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.574112] env[68285]: DEBUG nova.policy [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '557a46b01bbf41e4a343d20c8206aa96', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9352aafac6e049feb8d74a91d1600224', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 914.597458] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891455, 'name': PowerOnVM_Task} progress is 71%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.697780] env[68285]: DEBUG nova.compute.manager [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 914.735541] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891454, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.687364} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.735541] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] d1b5abfa-fd38-4d17-b75f-5036af841d24/d1b5abfa-fd38-4d17-b75f-5036af841d24.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 914.735541] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 914.735541] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9414438f-18aa-47ba-b25b-a934581568d4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.743022] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 914.743022] env[68285]: value = "task-2891457" [ 914.743022] env[68285]: _type = "Task" [ 914.743022] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.750385] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891457, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.980136] env[68285]: DEBUG nova.compute.manager [req-5a74f57c-9026-4995-a1d3-65e3e4e5cd53 req-26e062d9-2c69-4050-8d5b-8c85b88a08ca service nova] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Received event network-vif-plugged-7e46bb86-86a9-4e35-8965-1477f6e7b8af {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 914.980136] env[68285]: DEBUG oslo_concurrency.lockutils [req-5a74f57c-9026-4995-a1d3-65e3e4e5cd53 req-26e062d9-2c69-4050-8d5b-8c85b88a08ca service nova] Acquiring lock "7df1a9b4-e363-4e35-a8d5-6b09b671e6a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.984912] env[68285]: DEBUG oslo_concurrency.lockutils [req-5a74f57c-9026-4995-a1d3-65e3e4e5cd53 req-26e062d9-2c69-4050-8d5b-8c85b88a08ca service nova] Lock "7df1a9b4-e363-4e35-a8d5-6b09b671e6a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.984912] env[68285]: DEBUG oslo_concurrency.lockutils [req-5a74f57c-9026-4995-a1d3-65e3e4e5cd53 req-26e062d9-2c69-4050-8d5b-8c85b88a08ca service nova] Lock "7df1a9b4-e363-4e35-a8d5-6b09b671e6a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.984912] env[68285]: DEBUG nova.compute.manager [req-5a74f57c-9026-4995-a1d3-65e3e4e5cd53 req-26e062d9-2c69-4050-8d5b-8c85b88a08ca service nova] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] No waiting events found dispatching network-vif-plugged-7e46bb86-86a9-4e35-8965-1477f6e7b8af {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 914.984912] env[68285]: WARNING nova.compute.manager [req-5a74f57c-9026-4995-a1d3-65e3e4e5cd53 req-26e062d9-2c69-4050-8d5b-8c85b88a08ca service nova] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Received unexpected event network-vif-plugged-7e46bb86-86a9-4e35-8965-1477f6e7b8af for instance with vm_state building and task_state spawning. [ 914.984912] env[68285]: DEBUG nova.compute.manager [req-5a74f57c-9026-4995-a1d3-65e3e4e5cd53 req-26e062d9-2c69-4050-8d5b-8c85b88a08ca service nova] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Received event network-changed-7e46bb86-86a9-4e35-8965-1477f6e7b8af {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 914.984912] env[68285]: DEBUG nova.compute.manager [req-5a74f57c-9026-4995-a1d3-65e3e4e5cd53 req-26e062d9-2c69-4050-8d5b-8c85b88a08ca service nova] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Refreshing instance network info cache due to event network-changed-7e46bb86-86a9-4e35-8965-1477f6e7b8af. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 914.984912] env[68285]: DEBUG oslo_concurrency.lockutils [req-5a74f57c-9026-4995-a1d3-65e3e4e5cd53 req-26e062d9-2c69-4050-8d5b-8c85b88a08ca service nova] Acquiring lock "refresh_cache-7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.984912] env[68285]: DEBUG oslo_concurrency.lockutils [req-5a74f57c-9026-4995-a1d3-65e3e4e5cd53 req-26e062d9-2c69-4050-8d5b-8c85b88a08ca service nova] Acquired lock "refresh_cache-7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.984912] env[68285]: DEBUG nova.network.neutron [req-5a74f57c-9026-4995-a1d3-65e3e4e5cd53 req-26e062d9-2c69-4050-8d5b-8c85b88a08ca service nova] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Refreshing network info cache for port 7e46bb86-86a9-4e35-8965-1477f6e7b8af {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 915.024559] env[68285]: DEBUG oslo_vmware.api [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891456, 'name': PowerOffVM_Task, 'duration_secs': 0.298198} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.024559] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 915.024559] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 915.024559] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a6b481f-7495-47fd-bc30-9aaa3c512d9d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.032758] env[68285]: DEBUG nova.compute.manager [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 915.041893] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "refresh_cache-7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.101842] env[68285]: DEBUG oslo_vmware.api [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891455, 'name': PowerOnVM_Task, 'duration_secs': 0.753692} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.102129] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 915.102319] env[68285]: INFO nova.compute.manager [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Took 8.49 seconds to spawn the instance on the hypervisor. [ 915.102500] env[68285]: DEBUG nova.compute.manager [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 915.103341] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5484f6ad-234a-4401-b53d-3ee35dde9272 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.123086] env[68285]: DEBUG nova.network.neutron [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Successfully created port: 9e7d680e-05f0-476c-b513-752cb59b861e {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 915.129209] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.129611] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.129859] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Deleting the datastore file [datastore1] 940e0328-970d-4f49-a102-d8a00b8c299b {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.130374] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30eaab24-7866-4e60-8213-3fab3f7f8cf4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.137206] env[68285]: DEBUG oslo_vmware.api [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Waiting for the task: (returnval){ [ 915.137206] env[68285]: value = "task-2891459" [ 915.137206] env[68285]: _type = "Task" [ 915.137206] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.146607] env[68285]: DEBUG oslo_vmware.api [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891459, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.217466] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.253690] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891457, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114795} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.256680] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 915.258142] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68ded5c-00f0-4ca7-a2d1-70edd8734df0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.283547] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] d1b5abfa-fd38-4d17-b75f-5036af841d24/d1b5abfa-fd38-4d17-b75f-5036af841d24.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 915.286991] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdfb3978-5270-4f3e-a448-268ad675d263 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.313430] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 915.313430] env[68285]: value = "task-2891460" [ 915.313430] env[68285]: _type = "Task" [ 915.313430] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.323882] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891460, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.545193] env[68285]: DEBUG nova.network.neutron [req-5a74f57c-9026-4995-a1d3-65e3e4e5cd53 req-26e062d9-2c69-4050-8d5b-8c85b88a08ca service nova] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.632679] env[68285]: INFO nova.compute.manager [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Took 45.10 seconds to build instance. [ 915.647154] env[68285]: DEBUG oslo_vmware.api [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Task: {'id': task-2891459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.50683} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.647512] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 915.647715] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 915.647893] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 915.648079] env[68285]: INFO nova.compute.manager [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Took 1.16 seconds to destroy the instance on the hypervisor. [ 915.648327] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 915.648540] env[68285]: DEBUG nova.compute.manager [-] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 915.648687] env[68285]: DEBUG nova.network.neutron [-] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 915.652688] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670a8d13-e426-4212-8eb7-a80f0af37bbc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.661269] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8797d4df-c9e9-4534-86b3-bffcfeb2372f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.696785] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3542af-1675-4342-a734-708a26f871bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.714041] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588fb504-8162-4767-998b-1084851c6b41 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.733191] env[68285]: DEBUG nova.compute.provider_tree [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.773250] env[68285]: DEBUG nova.network.neutron [req-5a74f57c-9026-4995-a1d3-65e3e4e5cd53 req-26e062d9-2c69-4050-8d5b-8c85b88a08ca service nova] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.828710] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891460, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.014586] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c84c6878-40f4-4a99-ae2c-1373a6febd22 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "5266817c-ce3b-4c96-a3bd-32b631c29b81" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.046430] env[68285]: DEBUG nova.compute.manager [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 916.087657] env[68285]: DEBUG nova.virt.hardware [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 916.087916] env[68285]: DEBUG nova.virt.hardware [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 916.088077] env[68285]: DEBUG nova.virt.hardware [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 916.091613] env[68285]: DEBUG nova.virt.hardware [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 916.091613] env[68285]: DEBUG nova.virt.hardware [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 916.091613] env[68285]: DEBUG nova.virt.hardware [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 916.091613] env[68285]: DEBUG nova.virt.hardware [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 916.092253] env[68285]: DEBUG nova.virt.hardware [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 916.092253] env[68285]: DEBUG nova.virt.hardware [None 
req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 916.092253] env[68285]: DEBUG nova.virt.hardware [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 916.092253] env[68285]: DEBUG nova.virt.hardware [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 916.093507] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e1efd1-b55b-46bb-b928-3b3c7f0c87c4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.103168] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334041f7-069c-4160-9446-3ccd6249cbaa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.135050] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2f1f01b8-1b46-48fc-8ae0-c95410e90fec tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "5266817c-ce3b-4c96-a3bd-32b631c29b81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.592s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.136349] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c84c6878-40f4-4a99-ae2c-1373a6febd22 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "5266817c-ce3b-4c96-a3bd-32b631c29b81" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.122s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.136528] env[68285]: DEBUG nova.compute.manager [None req-c84c6878-40f4-4a99-ae2c-1373a6febd22 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 916.137562] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b551fb8-a625-4e0e-8fbc-2a23d29adfd8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.146328] env[68285]: DEBUG nova.compute.manager [None req-c84c6878-40f4-4a99-ae2c-1373a6febd22 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68285) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 916.146904] env[68285]: DEBUG nova.objects.instance [None req-c84c6878-40f4-4a99-ae2c-1373a6febd22 
tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lazy-loading 'flavor' on Instance uuid 5266817c-ce3b-4c96-a3bd-32b631c29b81 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.212318] env[68285]: DEBUG nova.compute.manager [req-5563af14-ce51-4480-87f6-a118ca6bf268 req-daddf1d8-38cf-4df7-9df1-2a0f346ce64c service nova] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Received event network-vif-deleted-2aa2446c-f5b4-4511-9c9f-9bd2e76047a8 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 916.212531] env[68285]: INFO nova.compute.manager [req-5563af14-ce51-4480-87f6-a118ca6bf268 req-daddf1d8-38cf-4df7-9df1-2a0f346ce64c service nova] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Neutron deleted interface 2aa2446c-f5b4-4511-9c9f-9bd2e76047a8; detaching it from the instance and deleting it from the info cache [ 916.212705] env[68285]: DEBUG nova.network.neutron [req-5563af14-ce51-4480-87f6-a118ca6bf268 req-daddf1d8-38cf-4df7-9df1-2a0f346ce64c service nova] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.219256] env[68285]: DEBUG nova.compute.manager [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Stashing vm_state: active {{(pid=68285) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 916.236962] env[68285]: DEBUG nova.scheduler.client.report [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 916.278282] env[68285]: DEBUG oslo_concurrency.lockutils [req-5a74f57c-9026-4995-a1d3-65e3e4e5cd53 req-26e062d9-2c69-4050-8d5b-8c85b88a08ca service nova] Releasing lock "refresh_cache-7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.278663] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "refresh_cache-7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.279816] env[68285]: DEBUG nova.network.neutron [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 916.323727] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e 
tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891460, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.570360] env[68285]: DEBUG nova.network.neutron [-] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.640458] env[68285]: DEBUG nova.compute.manager [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 916.719382] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-563b4894-64dd-44da-b0a0-05eb235ba856 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.731170] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4063a735-d55d-4847-be14-09845fb40632 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.745656] env[68285]: DEBUG oslo_concurrency.lockutils [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.746950] env[68285]: DEBUG nova.network.neutron [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Successfully updated port: 9e7d680e-05f0-476c-b513-752cb59b861e {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 916.748417] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.734s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.748883] env[68285]: DEBUG nova.compute.manager [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 916.753754] env[68285]: DEBUG oslo_concurrency.lockutils [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.570s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.754526] env[68285]: DEBUG nova.objects.instance [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 916.789215] env[68285]: DEBUG nova.compute.manager [req-5563af14-ce51-4480-87f6-a118ca6bf268 req-daddf1d8-38cf-4df7-9df1-2a0f346ce64c service nova] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Detach interface failed, port_id=2aa2446c-f5b4-4511-9c9f-9bd2e76047a8, reason: Instance 940e0328-970d-4f49-a102-d8a00b8c299b could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 916.818346] env[68285]: DEBUG nova.network.neutron [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.828530] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891460, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.988418] env[68285]: DEBUG nova.network.neutron [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Updating instance_info_cache with network_info: [{"id": "7e46bb86-86a9-4e35-8965-1477f6e7b8af", "address": "fa:16:3e:0d:91:f6", "network": {"id": "d7225652-1f39-4108-a3ec-2fe16c2f3612", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-725430316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d03ce152e74cec8910b12d34ad8ba6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e46bb86-86", "ovs_interfaceid": "7e46bb86-86a9-4e35-8965-1477f6e7b8af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.075328] env[68285]: INFO nova.compute.manager [-] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Took 1.43 seconds to deallocate network for instance. [ 917.158374] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c84c6878-40f4-4a99-ae2c-1373a6febd22 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 917.158374] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2531dd0-cefe-4479-9d96-5f6cf49268c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.166209] env[68285]: DEBUG oslo_vmware.api [None req-c84c6878-40f4-4a99-ae2c-1373a6febd22 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 917.166209] env[68285]: value = "task-2891461" [ 917.166209] env[68285]: _type = "Task" [ 917.166209] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.170353] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.177080] env[68285]: DEBUG oslo_vmware.api [None req-c84c6878-40f4-4a99-ae2c-1373a6febd22 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891461, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.256401] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "refresh_cache-2a1cc678-2bb2-403e-b6e8-afdeb8362eac" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.256401] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "refresh_cache-2a1cc678-2bb2-403e-b6e8-afdeb8362eac" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.256401] env[68285]: DEBUG nova.network.neutron [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 917.273501] env[68285]: DEBUG nova.compute.utils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 917.273501] env[68285]: DEBUG nova.compute.manager [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 917.273683] env[68285]: DEBUG nova.network.neutron [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 917.321521] env[68285]: DEBUG nova.policy [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '080ca112b7534d1284942bdd41514e66', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '923c0329269c41159ae4469d358fe25f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 917.333945] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891460, 'name': ReconfigVM_Task, 'duration_secs': 1.581729} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.334277] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Reconfigured VM instance instance-0000002d to attach disk [datastore1] d1b5abfa-fd38-4d17-b75f-5036af841d24/d1b5abfa-fd38-4d17-b75f-5036af841d24.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.334971] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19c1259a-c797-47cb-bf60-d22b44ce4cea {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.342564] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 917.342564] env[68285]: value = "task-2891462" [ 917.342564] env[68285]: _type = "Task" [ 917.342564] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.351411] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891462, 'name': Rename_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.491877] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "refresh_cache-7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.493579] env[68285]: DEBUG nova.compute.manager [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Instance network_info: |[{"id": "7e46bb86-86a9-4e35-8965-1477f6e7b8af", "address": "fa:16:3e:0d:91:f6", "network": {"id": "d7225652-1f39-4108-a3ec-2fe16c2f3612", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-725430316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d03ce152e74cec8910b12d34ad8ba6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e46bb86-86", "ovs_interfaceid": "7e46bb86-86a9-4e35-8965-1477f6e7b8af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 917.493985] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:91:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e46bb86-86a9-4e35-8965-1477f6e7b8af', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 917.501664] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 917.502120] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 917.502398] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af536762-d650-49bc-97c7-243600230e07 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.531719] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 917.531719] env[68285]: value = "task-2891463" [ 917.531719] env[68285]: _type = "Task" [ 917.531719] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.539228] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891463, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.584249] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.977094] env[68285]: DEBUG oslo_vmware.api [None req-c84c6878-40f4-4a99-ae2c-1373a6febd22 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891461, 'name': PowerOffVM_Task, 'duration_secs': 0.190615} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.977094] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c84c6878-40f4-4a99-ae2c-1373a6febd22 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 917.977094] env[68285]: DEBUG nova.compute.manager [None req-c84c6878-40f4-4a99-ae2c-1373a6febd22 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 917.977094] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbecca37-699f-49f0-a80d-86f51c171bf3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.977094] env[68285]: DEBUG oslo_concurrency.lockutils [None req-291175db-d1b0-43be-a726-16a4868e44ef tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.977094] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.505s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.977094] env[68285]: DEBUG nova.objects.instance [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Lazy-loading 'resources' on Instance uuid 29981c10-c6dd-4852-94ad-1f8f0135b8cc {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.977094] env[68285]: DEBUG nova.compute.manager [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 917.977094] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891462, 'name': Rename_Task, 'duration_secs': 0.145321} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.977094] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 917.977094] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e8c992f-5fe0-4ad5-893e-b7a62e20acc0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.977094] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 917.977094] env[68285]: value = "task-2891464" [ 917.977094] env[68285]: _type = "Task" [ 917.977094] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.977094] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891464, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.004095] env[68285]: DEBUG nova.network.neutron [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Successfully created port: 73717dde-af77-47f8-896b-24153f94b949 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 918.043496] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891463, 'name': CreateVM_Task, 'duration_secs': 0.345598} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.043496] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 918.045019] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.045273] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 918.045571] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 918.045842] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6045dbda-074b-4f30-9d48-e59d87ff1152 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.051453] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 918.051453] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e8efa2-3796-f945-a9b8-9b1191918d6e" [ 918.051453] env[68285]: _type = "Task" [ 918.051453] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.056475] env[68285]: DEBUG nova.network.neutron [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 918.067939] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e8efa2-3796-f945-a9b8-9b1191918d6e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.318253] env[68285]: DEBUG nova.network.neutron [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Updating instance_info_cache with network_info: [{"id": "9e7d680e-05f0-476c-b513-752cb59b861e", "address": "fa:16:3e:09:32:d6", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e7d680e-05", "ovs_interfaceid": "9e7d680e-05f0-476c-b513-752cb59b861e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.370029] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891464, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.483934] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c84c6878-40f4-4a99-ae2c-1373a6febd22 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "5266817c-ce3b-4c96-a3bd-32b631c29b81" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.347s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.563205] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e8efa2-3796-f945-a9b8-9b1191918d6e, 'name': SearchDatastore_Task, 'duration_secs': 0.013102} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.563525] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.563834] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 918.564202] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.564436] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 918.564727] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 918.565137] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01b7ff51-1eb8-4fea-b53c-26c0fb5e9086 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.578350] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 918.580064] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 918.580064] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ce95fd3-8ddd-4b9a-ac57-919110ca5ae3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.585186] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 918.585186] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5235b4ac-bd7d-e950-a3e8-e14df33dbc23" [ 918.585186] env[68285]: _type = "Task" [ 918.585186] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.596105] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5235b4ac-bd7d-e950-a3e8-e14df33dbc23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.718136] env[68285]: DEBUG nova.compute.manager [req-3d8bd288-5d7a-426d-9bed-37fdc5f8a260 req-665ec8db-c74b-4bc8-89cb-4538084a9040 service nova] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Received event network-vif-plugged-9e7d680e-05f0-476c-b513-752cb59b861e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 918.718356] env[68285]: DEBUG oslo_concurrency.lockutils [req-3d8bd288-5d7a-426d-9bed-37fdc5f8a260 req-665ec8db-c74b-4bc8-89cb-4538084a9040 service nova] Acquiring lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.718565] env[68285]: DEBUG oslo_concurrency.lockutils [req-3d8bd288-5d7a-426d-9bed-37fdc5f8a260 req-665ec8db-c74b-4bc8-89cb-4538084a9040 service nova] Lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.718732] env[68285]: DEBUG oslo_concurrency.lockutils [req-3d8bd288-5d7a-426d-9bed-37fdc5f8a260 req-665ec8db-c74b-4bc8-89cb-4538084a9040 service nova] Lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.718890] env[68285]: DEBUG nova.compute.manager [req-3d8bd288-5d7a-426d-9bed-37fdc5f8a260 req-665ec8db-c74b-4bc8-89cb-4538084a9040 service nova] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] No waiting events found dispatching network-vif-plugged-9e7d680e-05f0-476c-b513-752cb59b861e {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 918.719417] env[68285]: WARNING nova.compute.manager [req-3d8bd288-5d7a-426d-9bed-37fdc5f8a260 req-665ec8db-c74b-4bc8-89cb-4538084a9040 service nova] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Received unexpected event network-vif-plugged-9e7d680e-05f0-476c-b513-752cb59b861e 
for instance with vm_state building and task_state spawning. [ 918.719642] env[68285]: DEBUG nova.compute.manager [req-3d8bd288-5d7a-426d-9bed-37fdc5f8a260 req-665ec8db-c74b-4bc8-89cb-4538084a9040 service nova] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Received event network-changed-9e7d680e-05f0-476c-b513-752cb59b861e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 918.719829] env[68285]: DEBUG nova.compute.manager [req-3d8bd288-5d7a-426d-9bed-37fdc5f8a260 req-665ec8db-c74b-4bc8-89cb-4538084a9040 service nova] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Refreshing instance network info cache due to event network-changed-9e7d680e-05f0-476c-b513-752cb59b861e. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 918.719974] env[68285]: DEBUG oslo_concurrency.lockutils [req-3d8bd288-5d7a-426d-9bed-37fdc5f8a260 req-665ec8db-c74b-4bc8-89cb-4538084a9040 service nova] Acquiring lock "refresh_cache-2a1cc678-2bb2-403e-b6e8-afdeb8362eac" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.821132] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "refresh_cache-2a1cc678-2bb2-403e-b6e8-afdeb8362eac" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.821477] env[68285]: DEBUG nova.compute.manager [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Instance network_info: |[{"id": "9e7d680e-05f0-476c-b513-752cb59b861e", "address": "fa:16:3e:09:32:d6", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e7d680e-05", "ovs_interfaceid": "9e7d680e-05f0-476c-b513-752cb59b861e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 918.821830] env[68285]: DEBUG oslo_concurrency.lockutils [req-3d8bd288-5d7a-426d-9bed-37fdc5f8a260 req-665ec8db-c74b-4bc8-89cb-4538084a9040 service nova] Acquired lock "refresh_cache-2a1cc678-2bb2-403e-b6e8-afdeb8362eac" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 918.821945] env[68285]: DEBUG nova.network.neutron [req-3d8bd288-5d7a-426d-9bed-37fdc5f8a260 req-665ec8db-c74b-4bc8-89cb-4538084a9040 service nova] [instance: 
2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Refreshing network info cache for port 9e7d680e-05f0-476c-b513-752cb59b861e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 918.823167] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:32:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e7d680e-05f0-476c-b513-752cb59b861e', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 918.830528] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 918.833710] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 918.835056] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a80579d5-38a4-4bf8-9589-652bb9aa173c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.859046] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 918.859046] env[68285]: value = "task-2891465" [ 918.859046] env[68285]: _type = "Task" [ 918.859046] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.874929] env[68285]: DEBUG oslo_vmware.api [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891464, 'name': PowerOnVM_Task, 'duration_secs': 0.529531} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.878505] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 918.878723] env[68285]: INFO nova.compute.manager [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Took 9.56 seconds to spawn the instance on the hypervisor. 
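Note: the VM lifecycle entries above (PowerOffVM_Task, CreateVM_Task, Rename_Task, PowerOnVM_Task) all follow oslo.vmware's submit-then-poll pattern: a *_Task SOAP method is invoked, Nova logs "Waiting for the task", and wait_for_task() drives the repeated "_poll_task ... progress is N%" lines until vCenter reports completion. A minimal sketch of that pattern, assuming placeholder credentials and an already-resolved vm_ref (illustrative only, not the Nova driver code):

from oslo_vmware import api as vmware_api

def power_on_and_wait(session, vm_ref):
    """Submit PowerOnVM_Task and block until vCenter reports the result."""
    # invoke_api() issues the SOAP call ("Invoking VirtualMachine.PowerOnVM_Task"
    # in the log); the returned task object is then polled by wait_for_task(),
    # which produces the "_poll_task ... progress is N%" entries seen above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)

# Placeholder connection values; a real deployment takes these from the
# [vmware] section of nova.conf. task_poll_interval controls how often the
# poll loop runs. create_session=False keeps this sketch from connecting.
session = vmware_api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5,
    create_session=False)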
[ 918.878915] env[68285]: DEBUG nova.compute.manager [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 918.879156] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891465, 'name': CreateVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.882051] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6515ff9f-5c73-4e1e-9041-f8332d5286db {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.994445] env[68285]: DEBUG nova.compute.manager [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 919.002493] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquiring lock "8a848ec8-1ae0-4437-be4f-49219214d11f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.002997] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Lock "8a848ec8-1ae0-4437-be4f-49219214d11f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.024831] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade5c66b-c428-43c6-b4b2-28150936a8ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.040979] env[68285]: DEBUG nova.virt.hardware [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 919.042870] env[68285]: DEBUG 
nova.virt.hardware [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 919.043043] env[68285]: DEBUG nova.virt.hardware [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 919.043242] env[68285]: DEBUG nova.virt.hardware [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 919.043417] env[68285]: DEBUG nova.virt.hardware [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 919.044863] env[68285]: DEBUG nova.virt.hardware [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 919.045844] env[68285]: DEBUG nova.virt.hardware [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 919.045844] env[68285]: DEBUG nova.virt.hardware [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 919.045844] env[68285]: DEBUG nova.virt.hardware [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 919.045844] env[68285]: DEBUG nova.virt.hardware [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 919.046506] env[68285]: DEBUG nova.virt.hardware [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 919.047532] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7ceb0a-cb10-4373-b898-73b24e7009cd {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.051575] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58a3c87-63d6-4c2c-8d90-98c4098e81de {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.083518] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b314ef8-42fa-45cc-8fec-225a47567b87 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.090243] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56f7b6a-8026-4fa4-b2e3-8637f523cbeb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.109592] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5235b4ac-bd7d-e950-a3e8-e14df33dbc23, 'name': SearchDatastore_Task, 'duration_secs': 0.017356} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.110771] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813f6177-cbba-447e-865f-2667e5d8a5fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.114839] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f39ae93-b2a9-44eb-a3f7-3870472fa143 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.120870] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 919.120870] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5275bb8c-6c17-8de5-2154-123135cd8496" [ 919.120870] env[68285]: _type = "Task" [ 919.120870] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.128994] env[68285]: DEBUG nova.compute.provider_tree [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.138744] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5275bb8c-6c17-8de5-2154-123135cd8496, 'name': SearchDatastore_Task, 'duration_secs': 0.011186} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.138994] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 919.139264] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5/7df1a9b4-e363-4e35-a8d5-6b09b671e6a5.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 919.139552] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b99b1f8-e78a-403d-aa79-2f6308627040 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.146759] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 919.146759] env[68285]: value = "task-2891466" [ 919.146759] env[68285]: _type = "Task" [ 919.146759] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.156545] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891466, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.379782] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891465, 'name': CreateVM_Task, 'duration_secs': 0.416039} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.380154] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 919.384162] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.384162] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 919.384162] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 919.384162] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-333b0fcc-5be1-4b25-9e4c-401b51c70b30 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.388944] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 919.388944] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52dad18e-19a9-612c-fe17-2b0e24776718" [ 919.388944] env[68285]: _type = "Task" [ 919.388944] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.412376] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52dad18e-19a9-612c-fe17-2b0e24776718, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.413615] env[68285]: INFO nova.compute.manager [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Took 44.15 seconds to build instance. 
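Note: the 'Acquiring lock' / 'acquired ... waited N s' / '"released" ... held N s' lines above (for "compute_resources" and for the per-image datastore cache keys) are emitted by oslo.concurrency's lock wrapper. A minimal sketch of the two forms in play, with placeholder names and bodies rather than Nova's actual resource-tracker code:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Placeholder body: in Nova the resource tracker updates its in-memory
    # usage here, which is why concurrent builds log long "waited N s" times
    # while queued behind this semaphore.
    print('claiming resources for', instance_uuid)

# Context-manager form, comparable to the per-image cache locks such as
# "[datastore1] devstack-image-cache_base/<image-id>" in the log:
with lockutils.lock('devstack-image-cache_base/<image-id>'):
    pass  # fetch or reuse the cached image while holding the lock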
[ 919.636690] env[68285]: DEBUG nova.scheduler.client.report [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 919.647441] env[68285]: DEBUG nova.network.neutron [req-3d8bd288-5d7a-426d-9bed-37fdc5f8a260 req-665ec8db-c74b-4bc8-89cb-4538084a9040 service nova] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Updated VIF entry in instance network info cache for port 9e7d680e-05f0-476c-b513-752cb59b861e. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 919.647843] env[68285]: DEBUG nova.network.neutron [req-3d8bd288-5d7a-426d-9bed-37fdc5f8a260 req-665ec8db-c74b-4bc8-89cb-4538084a9040 service nova] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Updating instance_info_cache with network_info: [{"id": "9e7d680e-05f0-476c-b513-752cb59b861e", "address": "fa:16:3e:09:32:d6", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e7d680e-05", "ovs_interfaceid": "9e7d680e-05f0-476c-b513-752cb59b861e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.664416] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891466, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.801135] env[68285]: DEBUG nova.network.neutron [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Successfully updated port: 73717dde-af77-47f8-896b-24153f94b949 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 919.900920] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52dad18e-19a9-612c-fe17-2b0e24776718, 'name': SearchDatastore_Task, 'duration_secs': 0.021554} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.902531] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 919.902772] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 919.903016] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.903168] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 919.903348] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 919.903605] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f70142b-1d6d-4c80-bfa6-000283bc57fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.916069] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38830051-e399-43db-afdc-66db3349d79e tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "d1b5abfa-fd38-4d17-b75f-5036af841d24" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.527s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.921065] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 919.921065] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 919.921065] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d8539a6-e9c5-48d5-b454-982e731b0403 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.924611] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 919.924611] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c4ba81-e9a6-5036-c73f-d7e6b8963ce7" [ 919.924611] env[68285]: _type = "Task" [ 919.924611] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.933835] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c4ba81-e9a6-5036-c73f-d7e6b8963ce7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.143780] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.371s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.148514] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.526s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.150122] env[68285]: INFO nova.compute.claims [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 920.165024] env[68285]: DEBUG oslo_concurrency.lockutils [req-3d8bd288-5d7a-426d-9bed-37fdc5f8a260 req-665ec8db-c74b-4bc8-89cb-4538084a9040 service nova] Releasing lock "refresh_cache-2a1cc678-2bb2-403e-b6e8-afdeb8362eac" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.170196] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891466, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.586651} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.170554] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5/7df1a9b4-e363-4e35-a8d5-6b09b671e6a5.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 920.170976] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 920.171757] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-937ad907-bc0e-406e-87ef-75f210b1046f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.179834] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 920.179834] env[68285]: value = "task-2891467" [ 920.179834] env[68285]: _type = "Task" [ 920.179834] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.188347] env[68285]: INFO nova.scheduler.client.report [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Deleted allocations for instance 29981c10-c6dd-4852-94ad-1f8f0135b8cc [ 920.194485] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891467, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.303741] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "refresh_cache-d4f20336-9c29-4aac-8c0d-f577749cd7d7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.303906] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired lock "refresh_cache-d4f20336-9c29-4aac-8c0d-f577749cd7d7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.304078] env[68285]: DEBUG nova.network.neutron [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.314131] env[68285]: DEBUG nova.compute.manager [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 920.315814] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a98154-255c-441a-a30d-01ed78b8484b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.420405] env[68285]: DEBUG nova.compute.manager [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 920.437031] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c4ba81-e9a6-5036-c73f-d7e6b8963ce7, 'name': SearchDatastore_Task, 'duration_secs': 0.022791} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.437648] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25a4676f-40d6-4ea8-90c3-8dd8b6f8fb1e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.443873] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 920.443873] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5254873d-6be5-15d1-1556-3dd68f37a913" [ 920.443873] env[68285]: _type = "Task" [ 920.443873] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.453825] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5254873d-6be5-15d1-1556-3dd68f37a913, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.512971] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Acquiring lock "a2a7590d-c415-4955-8a25-4b1411449557" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.513348] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Lock "a2a7590d-c415-4955-8a25-4b1411449557" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.513526] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Acquiring lock "a2a7590d-c415-4955-8a25-4b1411449557-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.513846] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Lock "a2a7590d-c415-4955-8a25-4b1411449557-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.514041] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Lock "a2a7590d-c415-4955-8a25-4b1411449557-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 
0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.519913] env[68285]: INFO nova.compute.manager [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Terminating instance [ 920.701408] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891467, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074709} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.702739] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0187d5b8-7c75-420c-b707-8a0cfa293417 tempest-ServerShowV254Test-223952110 tempest-ServerShowV254Test-223952110-project-member] Lock "29981c10-c6dd-4852-94ad-1f8f0135b8cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.719s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.703229] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 920.704914] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad0b163-cdb8-4532-b82a-a224f17f677f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.730257] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5/7df1a9b4-e363-4e35-a8d5-6b09b671e6a5.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.730851] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d9b9e6f-c7ab-4519-bde2-df3ce439c623 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.750969] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 920.750969] env[68285]: value = "task-2891468" [ 920.750969] env[68285]: _type = "Task" [ 920.750969] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.759727] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891468, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.831268] env[68285]: INFO nova.compute.manager [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] instance snapshotting [ 920.831669] env[68285]: WARNING nova.compute.manager [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 920.836069] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-876a2bcf-4caf-49d3-ae56-f85a292ce4bd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.841030] env[68285]: DEBUG nova.network.neutron [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 920.860955] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b06515a-ca7a-4756-ae5c-f7a777a90830 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.950638] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.957163] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5254873d-6be5-15d1-1556-3dd68f37a913, 'name': SearchDatastore_Task, 'duration_secs': 0.052903} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.957478] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.957782] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 2a1cc678-2bb2-403e-b6e8-afdeb8362eac/2a1cc678-2bb2-403e-b6e8-afdeb8362eac.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 920.958167] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13ac69bd-ac8a-4025-a980-81ab3c1c5542 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.965784] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 920.965784] env[68285]: value = "task-2891469" [ 920.965784] env[68285]: _type = "Task" [ 920.965784] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.977025] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891469, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.017564] env[68285]: DEBUG nova.network.neutron [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Updating instance_info_cache with network_info: [{"id": "73717dde-af77-47f8-896b-24153f94b949", "address": "fa:16:3e:34:92:a1", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73717dde-af", "ovs_interfaceid": "73717dde-af77-47f8-896b-24153f94b949", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.026212] env[68285]: DEBUG nova.compute.manager [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 921.026830] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 921.027832] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6245d21-878c-4bfe-97ff-1b572fd824d4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.035773] env[68285]: DEBUG nova.compute.manager [req-0dc4e1fe-9fa2-44d0-8f6d-c1855e8651d6 req-2074e01f-103d-4bed-b517-369271c2a178 service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Received event network-vif-plugged-73717dde-af77-47f8-896b-24153f94b949 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 921.035773] env[68285]: DEBUG oslo_concurrency.lockutils [req-0dc4e1fe-9fa2-44d0-8f6d-c1855e8651d6 req-2074e01f-103d-4bed-b517-369271c2a178 service nova] Acquiring lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.035773] env[68285]: DEBUG oslo_concurrency.lockutils [req-0dc4e1fe-9fa2-44d0-8f6d-c1855e8651d6 req-2074e01f-103d-4bed-b517-369271c2a178 service nova] Lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.035773] env[68285]: DEBUG oslo_concurrency.lockutils [req-0dc4e1fe-9fa2-44d0-8f6d-c1855e8651d6 req-2074e01f-103d-4bed-b517-369271c2a178 service nova] Lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.035773] env[68285]: DEBUG nova.compute.manager [req-0dc4e1fe-9fa2-44d0-8f6d-c1855e8651d6 req-2074e01f-103d-4bed-b517-369271c2a178 service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] No waiting events found dispatching network-vif-plugged-73717dde-af77-47f8-896b-24153f94b949 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 921.035773] env[68285]: WARNING nova.compute.manager [req-0dc4e1fe-9fa2-44d0-8f6d-c1855e8651d6 req-2074e01f-103d-4bed-b517-369271c2a178 service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Received unexpected event network-vif-plugged-73717dde-af77-47f8-896b-24153f94b949 for instance with vm_state building and task_state spawning. 
[ 921.036064] env[68285]: DEBUG nova.compute.manager [req-0dc4e1fe-9fa2-44d0-8f6d-c1855e8651d6 req-2074e01f-103d-4bed-b517-369271c2a178 service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Received event network-changed-73717dde-af77-47f8-896b-24153f94b949 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 921.036586] env[68285]: DEBUG nova.compute.manager [req-0dc4e1fe-9fa2-44d0-8f6d-c1855e8651d6 req-2074e01f-103d-4bed-b517-369271c2a178 service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Refreshing instance network info cache due to event network-changed-73717dde-af77-47f8-896b-24153f94b949. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 921.036586] env[68285]: DEBUG oslo_concurrency.lockutils [req-0dc4e1fe-9fa2-44d0-8f6d-c1855e8651d6 req-2074e01f-103d-4bed-b517-369271c2a178 service nova] Acquiring lock "refresh_cache-d4f20336-9c29-4aac-8c0d-f577749cd7d7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.043897] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.044244] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-959af152-b37a-46bc-afe1-9caa4c8fa5e9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.053844] env[68285]: DEBUG oslo_vmware.api [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Waiting for the task: (returnval){ [ 921.053844] env[68285]: value = "task-2891470" [ 921.053844] env[68285]: _type = "Task" [ 921.053844] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.064374] env[68285]: DEBUG oslo_vmware.api [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891470, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.194831] env[68285]: INFO nova.compute.manager [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Rebuilding instance [ 921.265884] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891468, 'name': ReconfigVM_Task, 'duration_secs': 0.308362} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.275377] env[68285]: DEBUG nova.compute.manager [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 921.275377] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5/7df1a9b4-e363-4e35-a8d5-6b09b671e6a5.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.275377] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bc2f75-ee9e-4815-a650-fdf873fbee87 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.277353] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a6b3d9d-4f76-4457-80f0-46242d31a77b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.292773] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 921.292773] env[68285]: value = "task-2891471" [ 921.292773] env[68285]: _type = "Task" [ 921.292773] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.303469] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891471, 'name': Rename_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.376370] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 921.376889] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-89e8d116-878c-4b09-b31d-50ba5b4ba724 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.390103] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 921.390103] env[68285]: value = "task-2891472" [ 921.390103] env[68285]: _type = "Task" [ 921.390103] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.403428] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891472, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.481469] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891469, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.525837] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Releasing lock "refresh_cache-d4f20336-9c29-4aac-8c0d-f577749cd7d7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.526186] env[68285]: DEBUG nova.compute.manager [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Instance network_info: |[{"id": "73717dde-af77-47f8-896b-24153f94b949", "address": "fa:16:3e:34:92:a1", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73717dde-af", "ovs_interfaceid": "73717dde-af77-47f8-896b-24153f94b949", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 921.526936] env[68285]: DEBUG oslo_concurrency.lockutils [req-0dc4e1fe-9fa2-44d0-8f6d-c1855e8651d6 req-2074e01f-103d-4bed-b517-369271c2a178 service nova] Acquired lock "refresh_cache-d4f20336-9c29-4aac-8c0d-f577749cd7d7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.527040] env[68285]: DEBUG nova.network.neutron [req-0dc4e1fe-9fa2-44d0-8f6d-c1855e8651d6 req-2074e01f-103d-4bed-b517-369271c2a178 service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Refreshing network info cache for port 73717dde-af77-47f8-896b-24153f94b949 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 921.528283] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd 
tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:92:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73717dde-af77-47f8-896b-24153f94b949', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 921.537763] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Creating folder: Project (923c0329269c41159ae4469d358fe25f). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 921.541139] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a94f10a-1545-469b-bd1c-f304568d6114 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.554238] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Created folder: Project (923c0329269c41159ae4469d358fe25f) in parent group-v580775. [ 921.554238] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Creating folder: Instances. Parent ref: group-v580903. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 921.554238] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1735ce28-172b-4d27-99f4-3011b8ebef35 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.567761] env[68285]: DEBUG oslo_vmware.api [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891470, 'name': PowerOffVM_Task, 'duration_secs': 0.209748} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.568127] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 921.568313] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 921.569645] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfb6f579-30be-4462-bfe4-62df2018241d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.571322] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Created folder: Instances in parent group-v580903. [ 921.571463] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 921.574215] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 921.574787] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-30b3ef42-052c-44ab-abbc-b41562ffdbba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.599618] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 921.599618] env[68285]: value = "task-2891476" [ 921.599618] env[68285]: _type = "Task" [ 921.599618] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.612401] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891476, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.635084] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 921.635441] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 921.635633] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Deleting the datastore file [datastore2] a2a7590d-c415-4955-8a25-4b1411449557 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 921.635908] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ea820fd-6c84-41ce-8c7f-750adb42adbc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.642839] env[68285]: DEBUG oslo_vmware.api [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Waiting for the task: (returnval){ [ 921.642839] env[68285]: value = "task-2891477" [ 921.642839] env[68285]: _type = "Task" [ 921.642839] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.651944] env[68285]: DEBUG oslo_vmware.api [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891477, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.809078] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891471, 'name': Rename_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.813196] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb91275-67e8-4ab0-bd82-b6dc0785d094 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.818653] env[68285]: DEBUG nova.network.neutron [req-0dc4e1fe-9fa2-44d0-8f6d-c1855e8651d6 req-2074e01f-103d-4bed-b517-369271c2a178 service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Updated VIF entry in instance network info cache for port 73717dde-af77-47f8-896b-24153f94b949. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 921.818653] env[68285]: DEBUG nova.network.neutron [req-0dc4e1fe-9fa2-44d0-8f6d-c1855e8651d6 req-2074e01f-103d-4bed-b517-369271c2a178 service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Updating instance_info_cache with network_info: [{"id": "73717dde-af77-47f8-896b-24153f94b949", "address": "fa:16:3e:34:92:a1", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73717dde-af", "ovs_interfaceid": "73717dde-af77-47f8-896b-24153f94b949", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.826323] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a54c52-2e40-43d7-99a6-30cd4a4ef80c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.860298] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204e7279-dbd2-4de5-a3f7-da5eee655107 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.868504] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e45da7-8a69-4b50-b787-52d1e5bcd129 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.883881] env[68285]: DEBUG nova.compute.provider_tree [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.898630] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891472, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.980847] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891469, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550625} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.980903] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 2a1cc678-2bb2-403e-b6e8-afdeb8362eac/2a1cc678-2bb2-403e-b6e8-afdeb8362eac.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 921.981885] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 921.981885] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad309385-5867-4072-8ac7-59d7361086c6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.989185] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 921.989185] env[68285]: value = "task-2891478" [ 921.989185] env[68285]: _type = "Task" [ 921.989185] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.999467] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891478, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.110874] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891476, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.152852] env[68285]: DEBUG oslo_vmware.api [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Task: {'id': task-2891477, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149124} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.153152] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.153375] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 922.153547] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 922.153765] env[68285]: INFO nova.compute.manager [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Took 1.13 seconds to destroy the instance on the hypervisor. [ 922.154029] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 922.154229] env[68285]: DEBUG nova.compute.manager [-] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 922.154362] env[68285]: DEBUG nova.network.neutron [-] [instance: a2a7590d-c415-4955-8a25-4b1411449557] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 922.301871] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 922.303246] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-580f3905-56f3-4b8f-b7a6-3a7a4c42727e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.315872] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891471, 'name': Rename_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.319785] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 922.319785] env[68285]: value = "task-2891479" [ 922.319785] env[68285]: _type = "Task" [ 922.319785] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.325697] env[68285]: DEBUG oslo_concurrency.lockutils [req-0dc4e1fe-9fa2-44d0-8f6d-c1855e8651d6 req-2074e01f-103d-4bed-b517-369271c2a178 service nova] Releasing lock "refresh_cache-d4f20336-9c29-4aac-8c0d-f577749cd7d7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.333008] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891479, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.386489] env[68285]: DEBUG nova.scheduler.client.report [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 922.407572] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891472, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.502271] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891478, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072939} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.502617] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 922.503800] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d00cfb-aa14-4826-bcc5-cab46ac652b4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.530990] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] 2a1cc678-2bb2-403e-b6e8-afdeb8362eac/2a1cc678-2bb2-403e-b6e8-afdeb8362eac.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 922.531299] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-813470ae-6096-4818-a2a8-d95fb00e80dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.553549] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquiring lock "7bef3e2a-00ab-480a-aa8c-335635ee5d31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.553549] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Lock "7bef3e2a-00ab-480a-aa8c-335635ee5d31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.557980] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 922.557980] env[68285]: value = "task-2891480" [ 922.557980] env[68285]: _type = "Task" [ 922.557980] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.567675] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891480, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.610230] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891476, 'name': CreateVM_Task, 'duration_secs': 0.593478} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.610477] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 922.611142] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.611300] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.611608] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 922.611858] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20fac4ec-c384-456f-99ae-8cc7e7af8b0f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.616803] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 922.616803] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529d252d-1b01-459c-77b1-e3836f6b2043" [ 922.616803] env[68285]: _type = "Task" [ 922.616803] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.626550] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529d252d-1b01-459c-77b1-e3836f6b2043, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.808300] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891471, 'name': Rename_Task, 'duration_secs': 1.163824} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.808746] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 922.809304] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3c19338-b538-4781-b5dd-bea3a008612f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.817537] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 922.817537] env[68285]: value = "task-2891481" [ 922.817537] env[68285]: _type = "Task" [ 922.817537] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.831565] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891479, 'name': PowerOffVM_Task, 'duration_secs': 0.275951} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.834479] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 922.834774] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 922.835095] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891481, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.835843] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f5d4df-e783-47e0-9552-ebf59fb8da7b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.842292] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 922.842576] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f05bf0f0-5487-428f-9f1a-fd8887c935f3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.899158] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.751s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.899849] env[68285]: DEBUG nova.compute.manager [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 922.911910] env[68285]: DEBUG oslo_concurrency.lockutils [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.043s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.912123] env[68285]: DEBUG nova.objects.instance [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lazy-loading 'resources' on Instance uuid bda5b2fb-1875-4078-a4c1-f76f6abeaaf5 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.913804] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891472, 'name': CreateSnapshot_Task, 'duration_secs': 1.047105} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.916108] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 922.916108] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 922.916108] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 922.916325] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleting the datastore file [datastore1] d1b5abfa-fd38-4d17-b75f-5036af841d24 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 922.917202] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7aa1b9a-a60c-4426-9ba7-19201867dee0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.921405] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0c656f9-320a-4ed1-accb-1af9783ba093 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.936310] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 922.936310] env[68285]: value = "task-2891483" [ 922.936310] env[68285]: _type = "Task" [ 922.936310] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.947369] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891483, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.067670] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891480, 'name': ReconfigVM_Task, 'duration_secs': 0.433801} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.067981] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Reconfigured VM instance instance-0000002f to attach disk [datastore2] 2a1cc678-2bb2-403e-b6e8-afdeb8362eac/2a1cc678-2bb2-403e-b6e8-afdeb8362eac.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 923.069596] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b0aa5c5-c5af-402a-b4a3-e8ad72771f36 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.072575] env[68285]: DEBUG nova.compute.manager [req-ef093f94-ad89-4c59-91e4-bf3bd16d2114 req-cd30c16a-f88c-4ca2-b823-4e24f08235ab service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Received event network-vif-deleted-90718536-f982-4e0c-8bc8-5ce84e9f0a55 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 923.073766] env[68285]: INFO nova.compute.manager [req-ef093f94-ad89-4c59-91e4-bf3bd16d2114 req-cd30c16a-f88c-4ca2-b823-4e24f08235ab service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Neutron deleted interface 90718536-f982-4e0c-8bc8-5ce84e9f0a55; detaching it from the instance and deleting it from the info cache [ 923.073766] env[68285]: DEBUG nova.network.neutron [req-ef093f94-ad89-4c59-91e4-bf3bd16d2114 req-cd30c16a-f88c-4ca2-b823-4e24f08235ab service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.080132] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 923.080132] env[68285]: value = "task-2891484" [ 923.080132] env[68285]: _type = "Task" [ 923.080132] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.089886] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891484, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.127364] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529d252d-1b01-459c-77b1-e3836f6b2043, 'name': SearchDatastore_Task, 'duration_secs': 0.025458} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.127785] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.128062] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 923.128341] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.128521] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.128755] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 923.129028] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0441844-c8ff-4dd2-9503-1c2186ece848 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.137727] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 923.137914] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 923.138670] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d9ff991-1ee0-49f4-8033-4051bb92ac26 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.144140] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 923.144140] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]520acfdb-14f2-9cf0-1b45-55f5e67e275c" [ 923.144140] env[68285]: _type = "Task" [ 923.144140] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.152984] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520acfdb-14f2-9cf0-1b45-55f5e67e275c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.220977] env[68285]: DEBUG nova.network.neutron [-] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.327455] env[68285]: DEBUG oslo_vmware.api [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891481, 'name': PowerOnVM_Task, 'duration_secs': 0.482594} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.327782] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 923.327994] env[68285]: INFO nova.compute.manager [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Took 9.97 seconds to spawn the instance on the hypervisor. 
[ 923.328194] env[68285]: DEBUG nova.compute.manager [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 923.328971] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a56e7a-4b26-43ac-9e57-15cd9df91a3a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.424556] env[68285]: DEBUG nova.compute.utils [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 923.426712] env[68285]: DEBUG nova.compute.manager [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Not allocating networking since 'none' was specified. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 923.447431] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 923.451987] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e78bf826-0812-4e55-b733-404f010f07ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.469086] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891483, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143629} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.470039] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 923.470240] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 923.470436] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 923.472780] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 923.472780] env[68285]: value = "task-2891485" [ 923.472780] env[68285]: _type = "Task" [ 923.472780] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.480894] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891485, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.577407] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c90fc620-a9f0-4b60-a2f8-856fc58db29f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.588176] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89a56ca-037f-49fa-8de8-3d2252515c17 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.604841] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891484, 'name': Rename_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.631599] env[68285]: DEBUG nova.compute.manager [req-ef093f94-ad89-4c59-91e4-bf3bd16d2114 req-cd30c16a-f88c-4ca2-b823-4e24f08235ab service nova] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Detach interface failed, port_id=90718536-f982-4e0c-8bc8-5ce84e9f0a55, reason: Instance a2a7590d-c415-4955-8a25-4b1411449557 could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 923.653326] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520acfdb-14f2-9cf0-1b45-55f5e67e275c, 'name': SearchDatastore_Task, 'duration_secs': 0.008477} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.654126] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aea0dba6-8c63-4e43-94ff-220592fa800c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.663019] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 923.663019] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ccfc8e-6ba3-14aa-42a6-aba4fea900e2" [ 923.663019] env[68285]: _type = "Task" [ 923.663019] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.669498] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ccfc8e-6ba3-14aa-42a6-aba4fea900e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.727654] env[68285]: INFO nova.compute.manager [-] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Took 1.57 seconds to deallocate network for instance. [ 923.849819] env[68285]: INFO nova.compute.manager [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Took 45.02 seconds to build instance. [ 923.929667] env[68285]: DEBUG nova.compute.manager [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 923.952438] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9e9f56-0450-4a1d-886d-5f01b3a21587 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.960604] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60109417-efb9-40f4-a19b-1a3ac6414671 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.998187] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ddcfc7-5cad-4329-803d-1e24b813fffd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.006360] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891485, 'name': CloneVM_Task} progress is 93%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.012019] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0553417-6560-489d-8981-0ec95dc4cf4e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.026617] env[68285]: DEBUG nova.compute.provider_tree [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.092734] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891484, 'name': Rename_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.173009] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ccfc8e-6ba3-14aa-42a6-aba4fea900e2, 'name': SearchDatastore_Task, 'duration_secs': 0.010153} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.173287] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.173629] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] d4f20336-9c29-4aac-8c0d-f577749cd7d7/d4f20336-9c29-4aac-8c0d-f577749cd7d7.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 924.173904] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03cb9e81-78f7-4dc3-b42f-2e255fba8f1e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.180855] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 924.180855] env[68285]: value = "task-2891486" [ 924.180855] env[68285]: _type = "Task" [ 924.180855] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.190566] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891486, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.234884] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.352514] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed39df86-7ec8-42be-9a18-fbbf7fe3bffd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.511s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.507054] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891485, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.528355] env[68285]: DEBUG nova.virt.hardware [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 924.528715] env[68285]: DEBUG nova.virt.hardware [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.528887] env[68285]: DEBUG nova.virt.hardware [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 924.529082] env[68285]: DEBUG nova.virt.hardware [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.529230] env[68285]: DEBUG nova.virt.hardware [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 924.529390] env[68285]: DEBUG nova.virt.hardware [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 924.529585] env[68285]: DEBUG nova.virt.hardware [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 924.529743] env[68285]: DEBUG nova.virt.hardware [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
924.529933] env[68285]: DEBUG nova.virt.hardware [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 924.530451] env[68285]: DEBUG nova.virt.hardware [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 924.530451] env[68285]: DEBUG nova.virt.hardware [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 924.531199] env[68285]: DEBUG nova.scheduler.client.report [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 924.535063] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69000551-73ab-4f04-ae02-c565aee31667 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.543955] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa50237-3282-477d-9b2a-2bac49df7cd9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.558458] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:9a:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '82dbbfe2-640b-433f-a8e9-1566bd40fb34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '001d3974-db8e-494c-b536-d7415394a0e5', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 924.566301] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 924.567262] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 924.567502] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0fccc4a2-5faf-417e-bf8b-cd57f477ec44 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.592054] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891484, 'name': Rename_Task, 'duration_secs': 1.298375} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.593293] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 924.593570] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 924.593570] env[68285]: value = "task-2891487" [ 924.593570] env[68285]: _type = "Task" [ 924.593570] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.593801] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6f001a5-2929-4e31-9c8e-35c0c0840dfd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.605666] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891487, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.607168] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 924.607168] env[68285]: value = "task-2891488" [ 924.607168] env[68285]: _type = "Task" [ 924.607168] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.615198] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891488, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.691040] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891486, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452817} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.691186] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] d4f20336-9c29-4aac-8c0d-f577749cd7d7/d4f20336-9c29-4aac-8c0d-f577749cd7d7.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 924.691340] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 924.691585] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c774d10-763a-4cc3-ba98-4a2504a1c479 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.699050] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 924.699050] env[68285]: value = "task-2891489" [ 924.699050] env[68285]: _type = "Task" [ 924.699050] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.706451] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891489, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.807631] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bcfe91fe-95fe-49d8-a920-99af9e140929 tempest-ServersAdminTestJSON-1536557057 tempest-ServersAdminTestJSON-1536557057-project-admin] Acquiring lock "refresh_cache-7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.807884] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bcfe91fe-95fe-49d8-a920-99af9e140929 tempest-ServersAdminTestJSON-1536557057 tempest-ServersAdminTestJSON-1536557057-project-admin] Acquired lock "refresh_cache-7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.808166] env[68285]: DEBUG nova.network.neutron [None req-bcfe91fe-95fe-49d8-a920-99af9e140929 tempest-ServersAdminTestJSON-1536557057 tempest-ServersAdminTestJSON-1536557057-project-admin] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 924.855856] env[68285]: DEBUG nova.compute.manager [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 924.941337] env[68285]: DEBUG nova.compute.manager [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 924.966772] env[68285]: DEBUG nova.virt.hardware [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 924.966981] env[68285]: DEBUG nova.virt.hardware [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.967231] env[68285]: DEBUG nova.virt.hardware [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 924.967461] env[68285]: DEBUG nova.virt.hardware [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.967637] env[68285]: DEBUG nova.virt.hardware [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 924.967792] env[68285]: DEBUG nova.virt.hardware [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 924.968033] env[68285]: DEBUG nova.virt.hardware [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 924.968198] env[68285]: 
DEBUG nova.virt.hardware [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 924.968364] env[68285]: DEBUG nova.virt.hardware [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 924.968553] env[68285]: DEBUG nova.virt.hardware [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 924.968780] env[68285]: DEBUG nova.virt.hardware [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 924.969709] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25ebb7f-1ad7-4d7f-abcb-630ed005c6cd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.979511] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9a0f4b-54b3-4885-b7ce-5bcd8b823925 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.993174] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 924.998962] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Creating folder: Project (878380b1de2647bc8513cd1ad7694474). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 924.999407] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09590bae-a7e4-47ef-9a7b-20cd70e329ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.010100] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891485, 'name': CloneVM_Task} progress is 95%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.011689] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Created folder: Project (878380b1de2647bc8513cd1ad7694474) in parent group-v580775. 
[ 925.011871] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Creating folder: Instances. Parent ref: group-v580909. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 925.012116] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9c55f3e-dd58-45c2-860a-eab920be3e23 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.022855] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Created folder: Instances in parent group-v580909. [ 925.023150] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 925.023361] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 925.023583] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cf2a6a9-d493-4b9c-892a-052f5a8a856d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.039495] env[68285]: DEBUG oslo_concurrency.lockutils [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.127s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.042626] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.481s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.042852] env[68285]: DEBUG nova.objects.instance [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Lazy-loading 'resources' on Instance uuid 753bb2f7-bf0a-401e-81af-93982558d3b7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.044469] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 925.044469] env[68285]: value = "task-2891492" [ 925.044469] env[68285]: _type = "Task" [ 925.044469] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.051985] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891492, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.071309] env[68285]: INFO nova.scheduler.client.report [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Deleted allocations for instance bda5b2fb-1875-4078-a4c1-f76f6abeaaf5 [ 925.104751] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891487, 'name': CreateVM_Task, 'duration_secs': 0.370011} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.104932] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 925.105626] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.105785] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.106124] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 925.106397] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf0617ad-0c98-4e83-9790-68c3644a8198 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.115665] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 925.115665] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52db496c-79f2-e57f-01a9-39e2e693043d" [ 925.115665] env[68285]: _type = "Task" [ 925.115665] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.121896] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891488, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.129915] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52db496c-79f2-e57f-01a9-39e2e693043d, 'name': SearchDatastore_Task, 'duration_secs': 0.011312} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.130355] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.130600] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.130836] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.130982] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.131178] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.131432] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1071037-1586-43c6-b4da-5d2355b940eb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.143247] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.143247] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 925.143247] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d73f847-c0ba-43b7-9a01-c614f6e0f1dc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.145251] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 925.145251] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527e6c6f-b3b5-8d73-fcb5-0c6dce35e4cd" [ 925.145251] env[68285]: _type = "Task" [ 925.145251] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.154569] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527e6c6f-b3b5-8d73-fcb5-0c6dce35e4cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.207617] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891489, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071924} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.207929] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 925.208808] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0870e8-912c-4926-aee4-48dc773adc42 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.233128] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] d4f20336-9c29-4aac-8c0d-f577749cd7d7/d4f20336-9c29-4aac-8c0d-f577749cd7d7.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 925.233433] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d347137d-5303-4a5e-9ce6-6ef38463de9a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.257589] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 925.257589] env[68285]: value = "task-2891493" [ 925.257589] env[68285]: _type = "Task" [ 925.257589] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.266678] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891493, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.377807] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.509596] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891485, 'name': CloneVM_Task, 'duration_secs': 1.710727} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.510501] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Created linked-clone VM from snapshot [ 925.513988] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4cc0723-c191-4a2b-b7d9-012d795dd6b0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.521917] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Uploading image 99446637-e427-4a35-9dd2-98fd5edf8d53 {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 925.554259] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 925.554259] env[68285]: value = "vm-580907" [ 925.554259] env[68285]: _type = "VirtualMachine" [ 925.554259] env[68285]: }. 
{{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 925.555329] env[68285]: DEBUG nova.network.neutron [None req-bcfe91fe-95fe-49d8-a920-99af9e140929 tempest-ServersAdminTestJSON-1536557057 tempest-ServersAdminTestJSON-1536557057-project-admin] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Updating instance_info_cache with network_info: [{"id": "7e46bb86-86a9-4e35-8965-1477f6e7b8af", "address": "fa:16:3e:0d:91:f6", "network": {"id": "d7225652-1f39-4108-a3ec-2fe16c2f3612", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-725430316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d03ce152e74cec8910b12d34ad8ba6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e46bb86-86", "ovs_interfaceid": "7e46bb86-86a9-4e35-8965-1477f6e7b8af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.557183] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-078cacd6-73fc-4c73-b27d-99691d2b09fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.564070] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891492, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.565648] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lease: (returnval){ [ 925.565648] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d95142-3fa4-078f-b813-d9d645f38065" [ 925.565648] env[68285]: _type = "HttpNfcLease" [ 925.565648] env[68285]: } obtained for exporting VM: (result){ [ 925.565648] env[68285]: value = "vm-580907" [ 925.565648] env[68285]: _type = "VirtualMachine" [ 925.565648] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 925.565944] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the lease: (returnval){ [ 925.565944] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d95142-3fa4-078f-b813-d9d645f38065" [ 925.565944] env[68285]: _type = "HttpNfcLease" [ 925.565944] env[68285]: } to be ready. 
{{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 925.574561] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 925.574561] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d95142-3fa4-078f-b813-d9d645f38065" [ 925.574561] env[68285]: _type = "HttpNfcLease" [ 925.574561] env[68285]: } is initializing. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 925.579271] env[68285]: DEBUG oslo_concurrency.lockutils [None req-27ba495c-8a27-49ab-b27d-65ae7395822e tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "bda5b2fb-1875-4078-a4c1-f76f6abeaaf5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.466s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.622158] env[68285]: DEBUG oslo_vmware.api [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891488, 'name': PowerOnVM_Task, 'duration_secs': 0.783616} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.622493] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 925.622698] env[68285]: INFO nova.compute.manager [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Took 9.58 seconds to spawn the instance on the hypervisor. [ 925.622871] env[68285]: DEBUG nova.compute.manager [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 925.623693] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64dea993-9e00-4f27-bfbb-13afba2113a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.656817] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527e6c6f-b3b5-8d73-fcb5-0c6dce35e4cd, 'name': SearchDatastore_Task, 'duration_secs': 0.009725} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.661022] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b8cc915-2433-4a73-92ac-5a4bb72ad5a6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.662758] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 925.662758] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5239b2ca-726b-4b3a-7235-72b2cecdabd2" [ 925.662758] env[68285]: _type = "Task" [ 925.662758] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.673789] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5239b2ca-726b-4b3a-7235-72b2cecdabd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.772853] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891493, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.045191] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11ff8f7-3f36-4fc6-a44c-589810607e1f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.056396] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891492, 'name': CreateVM_Task, 'duration_secs': 0.748354} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.058176] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 926.058690] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.058855] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 926.059177] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 926.060138] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11bf83ca-209d-403a-8f24-503ade62db33 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.063289] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bcfe91fe-95fe-49d8-a920-99af9e140929 tempest-ServersAdminTestJSON-1536557057 tempest-ServersAdminTestJSON-1536557057-project-admin] Releasing lock "refresh_cache-7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.063501] env[68285]: DEBUG nova.compute.manager [None req-bcfe91fe-95fe-49d8-a920-99af9e140929 tempest-ServersAdminTestJSON-1536557057 tempest-ServersAdminTestJSON-1536557057-project-admin] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Inject network info {{(pid=68285) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 926.063752] env[68285]: DEBUG nova.compute.manager [None req-bcfe91fe-95fe-49d8-a920-99af9e140929 tempest-ServersAdminTestJSON-1536557057 tempest-ServersAdminTestJSON-1536557057-project-admin] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] network_info to inject: |[{"id": "7e46bb86-86a9-4e35-8965-1477f6e7b8af", "address": "fa:16:3e:0d:91:f6", "network": {"id": "d7225652-1f39-4108-a3ec-2fe16c2f3612", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-725430316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d03ce152e74cec8910b12d34ad8ba6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e46bb86-86", "ovs_interfaceid": "7e46bb86-86a9-4e35-8965-1477f6e7b8af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 926.068364] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bcfe91fe-95fe-49d8-a920-99af9e140929 tempest-ServersAdminTestJSON-1536557057 tempest-ServersAdminTestJSON-1536557057-project-admin] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Reconfiguring VM instance to set the machine id {{(pid=68285) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 926.068636] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b66850c-8b80-40dd-b35a-0ad7074b218e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.072081] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffa50271-c861-46d2-a39a-9c7975904040 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.835851] env[68285]: DEBUG oslo_concurrency.lockutils [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "437a18da-8fe4-478e-82a0-3b1a9da47df8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.835851] env[68285]: DEBUG oslo_concurrency.lockutils [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "437a18da-8fe4-478e-82a0-3b1a9da47df8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.835851] env[68285]: DEBUG oslo_concurrency.lockutils [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "437a18da-8fe4-478e-82a0-3b1a9da47df8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.835851] env[68285]: DEBUG oslo_concurrency.lockutils [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "437a18da-8fe4-478e-82a0-3b1a9da47df8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.835851] env[68285]: DEBUG oslo_concurrency.lockutils [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "437a18da-8fe4-478e-82a0-3b1a9da47df8-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.839321] env[68285]: INFO nova.compute.manager [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Terminating instance [ 926.846828] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5beafafb-8eba-48be-8763-d9bc7ecefac1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.849232] env[68285]: DEBUG oslo_vmware.api [None req-bcfe91fe-95fe-49d8-a920-99af9e140929 tempest-ServersAdminTestJSON-1536557057 tempest-ServersAdminTestJSON-1536557057-project-admin] Waiting for the task: (returnval){ [ 926.849232] env[68285]: value = "task-2891495" [ 926.849232] env[68285]: _type = "Task" [ 926.849232] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.849473] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Waiting for the task: (returnval){ [ 926.849473] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529e9cee-e921-8ffd-748a-480278ba3138" [ 926.849473] env[68285]: _type = "Task" [ 926.849473] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.864788] env[68285]: INFO nova.compute.manager [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Took 47.10 seconds to build instance. [ 926.864788] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 926.864788] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d95142-3fa4-078f-b813-d9d645f38065" [ 926.864788] env[68285]: _type = "HttpNfcLease" [ 926.864788] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 926.864788] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 926.864788] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d95142-3fa4-078f-b813-d9d645f38065" [ 926.864788] env[68285]: _type = "HttpNfcLease" [ 926.864788] env[68285]: }. 
{{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 926.864788] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bcf3bf-b57e-4792-a347-78cd3808933b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.870022] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5239b2ca-726b-4b3a-7235-72b2cecdabd2, 'name': SearchDatastore_Task, 'duration_secs': 0.011389} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.877378] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.877715] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] d1b5abfa-fd38-4d17-b75f-5036af841d24/d1b5abfa-fd38-4d17-b75f-5036af841d24.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 926.879360] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891493, 'name': ReconfigVM_Task, 'duration_secs': 0.714521} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.883819] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c06dacc6-5e9d-474c-9eba-d81269c2464f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.887674] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce79d437-cad2-49a6-afa1-09e93aeb73aa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.892106] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Reconfigured VM instance instance-00000030 to attach disk [datastore2] d4f20336-9c29-4aac-8c0d-f577749cd7d7/d4f20336-9c29-4aac-8c0d-f577749cd7d7.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.893181] env[68285]: DEBUG oslo_vmware.api [None req-bcfe91fe-95fe-49d8-a920-99af9e140929 tempest-ServersAdminTestJSON-1536557057 tempest-ServersAdminTestJSON-1536557057-project-admin] Task: {'id': task-2891495, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.895326] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c87e8d9-3972-4fde-afff-f847b6b82ba9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.901239] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52587eb3-3ae6-53a2-98e6-923fe9cd0bd1/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 926.901451] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52587eb3-3ae6-53a2-98e6-923fe9cd0bd1/disk-0.vmdk for reading. {{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 926.915829] env[68285]: DEBUG nova.compute.provider_tree [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.917979] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529e9cee-e921-8ffd-748a-480278ba3138, 'name': SearchDatastore_Task, 'duration_secs': 0.041467} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.921648] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.921859] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 926.922107] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.922637] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 926.922892] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 926.923378] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 926.923378] env[68285]: value = "task-2891496" [ 926.923378] env[68285]: _type = "Task" [ 926.923378] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.923614] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 926.923614] env[68285]: value = "task-2891497" [ 926.923614] env[68285]: _type = "Task" [ 926.923614] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.982093] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5e31a9e-4eda-476a-9beb-7447deda16c9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.002223] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891496, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.006977] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 927.007254] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 927.008588] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891497, 'name': Rename_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.008971] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60dc3984-af06-4e6d-9206-6063441a3de9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.016374] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Waiting for the task: (returnval){ [ 927.016374] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5254d010-e782-1ccb-3bac-284fdbeb079c" [ 927.016374] env[68285]: _type = "Task" [ 927.016374] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.026738] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5254d010-e782-1ccb-3bac-284fdbeb079c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.031956] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-97c2f5fb-3bde-4b7d-a1b1-9321c58ddc9d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.359679] env[68285]: DEBUG nova.compute.manager [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 927.360134] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 927.361726] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-972d5763-b1e9-4ceb-92f6-32c02d2ee862 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.366479] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eb0d643a-e87c-4eee-be78-2d3b93bf2d07 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.624s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.371428] env[68285]: DEBUG oslo_vmware.api [None req-bcfe91fe-95fe-49d8-a920-99af9e140929 tempest-ServersAdminTestJSON-1536557057 tempest-ServersAdminTestJSON-1536557057-project-admin] Task: {'id': task-2891495, 'name': ReconfigVM_Task, 'duration_secs': 0.189647} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.373895] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bcfe91fe-95fe-49d8-a920-99af9e140929 tempest-ServersAdminTestJSON-1536557057 tempest-ServersAdminTestJSON-1536557057-project-admin] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Reconfigured VM instance to set the machine id {{(pid=68285) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 927.374388] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 927.374658] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3edee1b-a203-43e6-bd4b-f5242e6d54aa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.383432] env[68285]: DEBUG oslo_vmware.api [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 927.383432] env[68285]: value = "task-2891498" [ 927.383432] env[68285]: _type = "Task" [ 927.383432] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.392923] env[68285]: DEBUG oslo_vmware.api [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891498, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.419599] env[68285]: DEBUG nova.scheduler.client.report [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 927.499365] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891496, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534863} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.503354] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] d1b5abfa-fd38-4d17-b75f-5036af841d24/d1b5abfa-fd38-4d17-b75f-5036af841d24.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 927.503697] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 927.504029] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891497, 'name': Rename_Task, 'duration_secs': 0.214427} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.507341] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95bcc647-1b6d-4b0d-8caf-3caf6881f64c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.510477] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.511164] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12fa04a8-8cca-4537-8db2-eb6804aca499 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.518530] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 927.518530] env[68285]: value = "task-2891499" [ 927.518530] env[68285]: _type = "Task" [ 927.518530] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.523360] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 927.523360] env[68285]: value = "task-2891500" [ 927.523360] env[68285]: _type = "Task" [ 927.523360] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.540905] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5254d010-e782-1ccb-3bac-284fdbeb079c, 'name': SearchDatastore_Task, 'duration_secs': 0.009226} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.546897] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a153d99-bb43-4de1-ae35-ce3ef068f3cc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.551787] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891499, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.556624] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891500, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.560870] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Waiting for the task: (returnval){ [ 927.560870] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523b2d28-5ab3-4829-f1cf-b903be9cdfba" [ 927.560870] env[68285]: _type = "Task" [ 927.560870] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.570180] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523b2d28-5ab3-4829-f1cf-b903be9cdfba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.856025] env[68285]: DEBUG oslo_concurrency.lockutils [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.856240] env[68285]: DEBUG oslo_concurrency.lockutils [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.894068] env[68285]: DEBUG oslo_vmware.api [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891498, 'name': PowerOffVM_Task, 'duration_secs': 0.237945} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.894798] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 927.894798] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 927.894934] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d48d744-319d-4b56-a804-cd97ae9ab5fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.924899] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.882s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.927897] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.758s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.929619] env[68285]: INFO nova.compute.claims [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 927.954962] env[68285]: INFO nova.scheduler.client.report [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Deleted allocations for instance 753bb2f7-bf0a-401e-81af-93982558d3b7 [ 927.985382] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 927.985382] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 927.985382] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Deleting the datastore file [datastore2] 437a18da-8fe4-478e-82a0-3b1a9da47df8 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 927.985732] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02e1bc56-7dea-44ca-9f81-c4441b2d0b0c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.994312] env[68285]: DEBUG oslo_vmware.api [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 927.994312] env[68285]: value = "task-2891502" [ 927.994312] env[68285]: _type = "Task" [ 927.994312] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.005496] env[68285]: DEBUG oslo_vmware.api [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891502, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.011907] env[68285]: INFO nova.compute.manager [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Rebuilding instance [ 928.040391] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891499, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081069} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.041779] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 928.043294] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75734c10-5b10-4deb-8030-a7bbd003b3d9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.050672] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891500, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.078891] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] d1b5abfa-fd38-4d17-b75f-5036af841d24/d1b5abfa-fd38-4d17-b75f-5036af841d24.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.085922] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e60d9d1-ffca-4dea-acab-666e7128f2d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.105400] env[68285]: DEBUG nova.compute.manager [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 928.107298] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9dc6fd-8d97-469e-8d3f-8d6ff30407b5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.113866] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523b2d28-5ab3-4829-f1cf-b903be9cdfba, 'name': SearchDatastore_Task, 'duration_secs': 0.010954} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.115648] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.115944] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] c7ab28c3-a316-4685-b876-a0e7c657ec35/c7ab28c3-a316-4685-b876-a0e7c657ec35.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 928.116367] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 928.116367] env[68285]: value = "task-2891503" [ 928.116367] env[68285]: _type = "Task" [ 928.116367] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.116618] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3740002b-2fc6-4f9d-8d34-e4e516dc51e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.130995] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Waiting for the task: (returnval){ [ 928.130995] env[68285]: value = "task-2891504" [ 928.130995] env[68285]: _type = "Task" [ 928.130995] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.134105] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891503, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.143072] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891504, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.359723] env[68285]: DEBUG nova.compute.utils [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 928.464027] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4fafadcd-0ad9-4d02-ab32-9ac085c4260f tempest-InstanceActionsV221TestJSON-1749525043 tempest-InstanceActionsV221TestJSON-1749525043-project-member] Lock "753bb2f7-bf0a-401e-81af-93982558d3b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.413s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.506033] env[68285]: DEBUG oslo_vmware.api [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891502, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169226} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.506033] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 928.506033] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 928.506033] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 928.506313] env[68285]: INFO nova.compute.manager [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Took 1.15 seconds to destroy the instance on the hypervisor. [ 928.506357] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 928.506525] env[68285]: DEBUG nova.compute.manager [-] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 928.506622] env[68285]: DEBUG nova.network.neutron [-] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 928.539523] env[68285]: DEBUG oslo_vmware.api [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891500, 'name': PowerOnVM_Task, 'duration_secs': 0.775012} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.539953] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.540170] env[68285]: INFO nova.compute.manager [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Took 9.55 seconds to spawn the instance on the hypervisor. [ 928.540355] env[68285]: DEBUG nova.compute.manager [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 928.541244] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71221798-8675-4dc6-a913-320c4af34b45 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.632791] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891503, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.643164] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891504, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498037} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.643459] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] c7ab28c3-a316-4685-b876-a0e7c657ec35/c7ab28c3-a316-4685-b876-a0e7c657ec35.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 928.643703] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 928.643977] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fff01e90-f9b6-4315-aa43-f9c2225c95ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.651128] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Waiting for the task: (returnval){ [ 928.651128] env[68285]: value = "task-2891505" [ 928.651128] env[68285]: _type = "Task" [ 928.651128] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.659409] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891505, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.866081] env[68285]: DEBUG oslo_concurrency.lockutils [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.061230] env[68285]: INFO nova.compute.manager [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Took 40.60 seconds to build instance. [ 929.131656] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891503, 'name': ReconfigVM_Task, 'duration_secs': 0.73101} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.131782] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Reconfigured VM instance instance-0000002d to attach disk [datastore1] d1b5abfa-fd38-4d17-b75f-5036af841d24/d1b5abfa-fd38-4d17-b75f-5036af841d24.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 929.132320] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-567ef133-750f-44ee-8e09-fb3190023e12 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.134229] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 929.134455] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48858eb9-2f19-46f6-9d23-692fb4865ce5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.141619] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 929.141619] env[68285]: value = "task-2891506" [ 929.141619] env[68285]: _type = "Task" [ 929.141619] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.142450] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 929.142450] env[68285]: value = "task-2891507" [ 929.142450] env[68285]: _type = "Task" [ 929.142450] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.162264] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891506, 'name': Rename_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.162514] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891507, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.169895] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891505, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073608} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.170202] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 929.171317] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e13c7e-b9a8-4a8f-b831-4bfd25a8b3e1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.194998] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] c7ab28c3-a316-4685-b876-a0e7c657ec35/c7ab28c3-a316-4685-b876-a0e7c657ec35.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 929.198027] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86db70b5-5869-4610-8036-e0d22c2da948 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.219119] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Waiting for the task: (returnval){ [ 929.219119] env[68285]: value = "task-2891508" [ 929.219119] env[68285]: _type = "Task" [ 929.219119] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.230701] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891508, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.435374] env[68285]: DEBUG nova.network.neutron [-] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.474582] env[68285]: DEBUG nova.compute.manager [req-91f803ac-7f6d-462d-9514-e4452aba2c76 req-4c145957-d572-435b-9199-0d496a415215 service nova] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Received event network-vif-deleted-1f09289d-57ed-49a6-a446-81bdfee585f3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 929.531528] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd10550-2f4d-4942-bc32-0456ceed2049 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.539708] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9da371-5042-43eb-a98c-c729020aa37e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.569687] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0ccabd15-0097-40e5-86ae-eafc15f751bd tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.125s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.570875] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210e5da7-cfb3-4ad5-8400-4d2af59532ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.578429] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d132f929-76bb-43b7-85ee-57c796d2ab5c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.595335] env[68285]: DEBUG nova.compute.provider_tree [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.655367] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891506, 'name': Rename_Task, 'duration_secs': 0.234997} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.656305] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 929.656695] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8da189b9-b0e6-491d-98e6-aeb42ac1469f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.661889] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891507, 'name': PowerOffVM_Task, 'duration_secs': 0.370848} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.662621] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 929.663936] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 929.663936] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8373d0b-ff07-4b15-8361-bdbc537f28a6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.667916] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 929.667916] env[68285]: value = "task-2891509" [ 929.667916] env[68285]: _type = "Task" [ 929.667916] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.673230] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 929.673797] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71999bf1-b93d-4aff-9b19-dc6ed62e8c1a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.678660] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891509, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.731116] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891508, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.753744] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 929.754193] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 929.754319] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleting the datastore file [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 929.754618] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4ecb4d3-ba1e-4fba-85eb-d3649330a519 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.764883] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 929.764883] env[68285]: value = "task-2891511" [ 929.764883] env[68285]: _type = "Task" [ 929.764883] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.772389] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891511, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.941687] env[68285]: INFO nova.compute.manager [-] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Took 1.43 seconds to deallocate network for instance. 
[ 929.951235] env[68285]: DEBUG oslo_concurrency.lockutils [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.951509] env[68285]: DEBUG oslo_concurrency.lockutils [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.951739] env[68285]: INFO nova.compute.manager [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Attaching volume 9c9ab8e5-af3d-4467-aa6d-9969e7562dfb to /dev/sdb [ 929.990206] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b7aed90-a8a9-4d70-ac23-6e63c0698c5b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.001426] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7280b175-e937-44b6-b7e5-fd06a648aeb3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.015957] env[68285]: DEBUG nova.virt.block_device [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Updating existing volume attachment record: e3347607-8fea-4056-a181-9c9abbffbda5 {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 930.098452] env[68285]: DEBUG nova.scheduler.client.report [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 930.179092] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891509, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.231808] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891508, 'name': ReconfigVM_Task, 'duration_secs': 0.608355} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.232018] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Reconfigured VM instance instance-00000031 to attach disk [datastore1] c7ab28c3-a316-4685-b876-a0e7c657ec35/c7ab28c3-a316-4685-b876-a0e7c657ec35.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 930.232494] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2d80a8b-458d-41be-8fab-786e0707894d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.239411] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Waiting for the task: (returnval){ [ 930.239411] env[68285]: value = "task-2891513" [ 930.239411] env[68285]: _type = "Task" [ 930.239411] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.247785] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891513, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.274158] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891511, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141504} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.274912] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 930.274912] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 930.275211] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 930.455463] env[68285]: DEBUG oslo_concurrency.lockutils [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.605936] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.678s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.606472] env[68285]: DEBUG nova.compute.manager [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 930.609421] env[68285]: DEBUG oslo_concurrency.lockutils [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.448s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.609580] env[68285]: DEBUG nova.objects.instance [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lazy-loading 'resources' on Instance uuid 324cc3e5-1c81-498e-b520-e9fca26013ef {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 930.679473] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891509, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.751448] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891513, 'name': Rename_Task, 'duration_secs': 0.222029} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.751938] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 930.752241] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa75ead1-383c-455b-8a40-d24eac340929 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.759033] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Waiting for the task: (returnval){ [ 930.759033] env[68285]: value = "task-2891516" [ 930.759033] env[68285]: _type = "Task" [ 930.759033] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.771080] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891516, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.113252] env[68285]: DEBUG nova.compute.utils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 931.117952] env[68285]: DEBUG nova.compute.manager [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 931.117952] env[68285]: DEBUG nova.network.neutron [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 931.170948] env[68285]: DEBUG nova.policy [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c07415c552542bda58552ad79163d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb8e48ceae0748b0b8c762ab7303a4b7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 931.183321] env[68285]: DEBUG oslo_vmware.api [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891509, 'name': PowerOnVM_Task, 'duration_secs': 1.316393} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.183599] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 931.183806] env[68285]: DEBUG nova.compute.manager [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 931.184607] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3c7ef7-6f05-4ca1-8cf4-7bb2908c324a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.269385] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891516, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.321154] env[68285]: DEBUG nova.virt.hardware [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 931.321408] env[68285]: DEBUG nova.virt.hardware [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 931.322115] env[68285]: DEBUG nova.virt.hardware [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 931.322115] env[68285]: DEBUG nova.virt.hardware [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 931.322115] env[68285]: DEBUG nova.virt.hardware [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 931.322115] env[68285]: DEBUG nova.virt.hardware [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 931.322255] env[68285]: DEBUG nova.virt.hardware [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 931.322395] env[68285]: DEBUG nova.virt.hardware [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 931.322556] env[68285]: DEBUG nova.virt.hardware [None 
req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 931.322712] env[68285]: DEBUG nova.virt.hardware [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 931.322884] env[68285]: DEBUG nova.virt.hardware [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 931.323743] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a079b5-70d4-43c3-9c75-7076f207345a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.333718] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d69c4c2-e89d-4de8-a8de-f9024350d349 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.350222] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:d3:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c317408-dd23-42c9-a837-c59782c5654a', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 931.357349] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 931.357814] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 931.358054] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0292c276-3d92-4d75-9ef2-262364f916d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.379753] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 931.379753] env[68285]: value = "task-2891517" [ 931.379753] env[68285]: _type = "Task" [ 931.379753] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.387795] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891517, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.618759] env[68285]: DEBUG nova.compute.manager [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 931.623858] env[68285]: DEBUG nova.compute.manager [req-a34ccc46-df24-4a58-9bd1-7b02857de3ba req-db5805e9-a6c3-4fdf-b40c-271a1fb9128e service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Received event network-changed-73717dde-af77-47f8-896b-24153f94b949 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 931.625794] env[68285]: DEBUG nova.compute.manager [req-a34ccc46-df24-4a58-9bd1-7b02857de3ba req-db5805e9-a6c3-4fdf-b40c-271a1fb9128e service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Refreshing instance network info cache due to event network-changed-73717dde-af77-47f8-896b-24153f94b949. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 931.626221] env[68285]: DEBUG oslo_concurrency.lockutils [req-a34ccc46-df24-4a58-9bd1-7b02857de3ba req-db5805e9-a6c3-4fdf-b40c-271a1fb9128e service nova] Acquiring lock "refresh_cache-d4f20336-9c29-4aac-8c0d-f577749cd7d7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.626403] env[68285]: DEBUG oslo_concurrency.lockutils [req-a34ccc46-df24-4a58-9bd1-7b02857de3ba req-db5805e9-a6c3-4fdf-b40c-271a1fb9128e service nova] Acquired lock "refresh_cache-d4f20336-9c29-4aac-8c0d-f577749cd7d7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.626576] env[68285]: DEBUG nova.network.neutron [req-a34ccc46-df24-4a58-9bd1-7b02857de3ba req-db5805e9-a6c3-4fdf-b40c-271a1fb9128e service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Refreshing network info cache for port 73717dde-af77-47f8-896b-24153f94b949 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 931.713118] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.753424] env[68285]: DEBUG nova.network.neutron [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Successfully created port: 046ad230-e38b-43a3-ab69-77b20f182d76 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 931.781089] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891516, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.866895] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ba2f3a-8911-4eb6-84fa-cf280e6c9cfd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.874845] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebdeea55-ef4d-4b14-a9ca-80f59f0577eb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.909194] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0cccb21-bfff-4aea-aab5-7c6b3c68f733 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.915274] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891517, 'name': CreateVM_Task, 'duration_secs': 0.504212} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.915926] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 931.916797] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.917371] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.917371] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 931.919588] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3926165-d591-4125-8fe9-7b73487431f7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.923687] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d802eb-f818-4187-8fbf-3f7706925dde {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.931115] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 931.931115] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d9c2cb-23c3-3835-5da0-55cbc8909536" [ 931.931115] env[68285]: _type = "Task" [ 931.931115] env[68285]: } to 
complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.940196] env[68285]: DEBUG nova.compute.provider_tree [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.950310] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d9c2cb-23c3-3835-5da0-55cbc8909536, 'name': SearchDatastore_Task, 'duration_secs': 0.010378} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.950612] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.950839] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 931.951084] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.951231] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.951407] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 931.951682] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca4418c3-925e-4ea7-be2e-e746d82c9f9b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.960595] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 931.960846] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 931.961623] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1de9014f-1617-41d1-9b71-8d69d32c4510 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.969653] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 931.969653] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e9ff06-ce3f-f29b-cd8c-2a50e13f6632" [ 931.969653] env[68285]: _type = "Task" [ 931.969653] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.980992] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e9ff06-ce3f-f29b-cd8c-2a50e13f6632, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.273902] env[68285]: DEBUG oslo_vmware.api [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891516, 'name': PowerOnVM_Task, 'duration_secs': 1.285176} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.274271] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 932.274390] env[68285]: INFO nova.compute.manager [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Took 7.33 seconds to spawn the instance on the hypervisor. 
[ 932.274565] env[68285]: DEBUG nova.compute.manager [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 932.275367] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29f7a06-adf3-4f21-8291-b00b76df6354 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.390549] env[68285]: DEBUG nova.network.neutron [req-a34ccc46-df24-4a58-9bd1-7b02857de3ba req-db5805e9-a6c3-4fdf-b40c-271a1fb9128e service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Updated VIF entry in instance network info cache for port 73717dde-af77-47f8-896b-24153f94b949. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 932.390909] env[68285]: DEBUG nova.network.neutron [req-a34ccc46-df24-4a58-9bd1-7b02857de3ba req-db5805e9-a6c3-4fdf-b40c-271a1fb9128e service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Updating instance_info_cache with network_info: [{"id": "73717dde-af77-47f8-896b-24153f94b949", "address": "fa:16:3e:34:92:a1", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73717dde-af", "ovs_interfaceid": "73717dde-af77-47f8-896b-24153f94b949", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.446570] env[68285]: DEBUG nova.scheduler.client.report [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 932.480802] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] 
Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e9ff06-ce3f-f29b-cd8c-2a50e13f6632, 'name': SearchDatastore_Task, 'duration_secs': 0.012056} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.481617] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30c050ce-cb12-4f0c-af92-0735452dbb47 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.487140] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 932.487140] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5286567a-4a5d-0a5b-2216-2728140fd28a" [ 932.487140] env[68285]: _type = "Task" [ 932.487140] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.495256] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5286567a-4a5d-0a5b-2216-2728140fd28a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.638714] env[68285]: DEBUG nova.compute.manager [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 932.685375] env[68285]: DEBUG nova.virt.hardware [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 932.685641] env[68285]: DEBUG nova.virt.hardware [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 932.685828] env[68285]: DEBUG nova.virt.hardware [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 932.687310] env[68285]: DEBUG nova.virt.hardware [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 932.687310] env[68285]: DEBUG nova.virt.hardware [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 932.687310] env[68285]: DEBUG nova.virt.hardware [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 932.687310] env[68285]: DEBUG nova.virt.hardware [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 932.687310] env[68285]: DEBUG nova.virt.hardware [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 932.687310] 
env[68285]: DEBUG nova.virt.hardware [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 932.687569] env[68285]: DEBUG nova.virt.hardware [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 932.687829] env[68285]: DEBUG nova.virt.hardware [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 932.688831] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2742de-98b3-4c25-b558-5cdcb2370919 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.698916] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5096559e-0afb-4fce-b203-473c158bd2a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.802825] env[68285]: INFO nova.compute.manager [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Took 37.20 seconds to build instance. 
[ 932.897700] env[68285]: DEBUG oslo_concurrency.lockutils [req-a34ccc46-df24-4a58-9bd1-7b02857de3ba req-db5805e9-a6c3-4fdf-b40c-271a1fb9128e service nova] Releasing lock "refresh_cache-d4f20336-9c29-4aac-8c0d-f577749cd7d7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.956886] env[68285]: DEBUG oslo_concurrency.lockutils [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.347s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.959013] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.314s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.960526] env[68285]: INFO nova.compute.claims [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 933.003020] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5286567a-4a5d-0a5b-2216-2728140fd28a, 'name': SearchDatastore_Task, 'duration_secs': 0.011184} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.003020] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 933.003020] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b/ec89a2a4-3bfc-45c5-b7f2-239b52995d6b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 933.003020] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34db502b-e80c-477a-af24-69fe230268c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.005352] env[68285]: INFO nova.scheduler.client.report [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Deleted allocations for instance 324cc3e5-1c81-498e-b520-e9fca26013ef [ 933.016240] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 933.016240] env[68285]: value = "task-2891519" [ 933.016240] env[68285]: _type = "Task" [ 933.016240] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.029305] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891519, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.161821] env[68285]: DEBUG oslo_concurrency.lockutils [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "d1b5abfa-fd38-4d17-b75f-5036af841d24" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.161821] env[68285]: DEBUG oslo_concurrency.lockutils [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "d1b5abfa-fd38-4d17-b75f-5036af841d24" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.161821] env[68285]: DEBUG oslo_concurrency.lockutils [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "d1b5abfa-fd38-4d17-b75f-5036af841d24-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.161821] env[68285]: DEBUG oslo_concurrency.lockutils [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "d1b5abfa-fd38-4d17-b75f-5036af841d24-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.161821] env[68285]: DEBUG oslo_concurrency.lockutils [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "d1b5abfa-fd38-4d17-b75f-5036af841d24-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.163315] env[68285]: INFO nova.compute.manager [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Terminating instance [ 933.306712] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc765635-7ba7-41cd-91fa-a874e5d3c671 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Lock "c7ab28c3-a316-4685-b876-a0e7c657ec35" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.652s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.348016] env[68285]: DEBUG nova.network.neutron [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Successfully updated port: 046ad230-e38b-43a3-ab69-77b20f182d76 {{(pid=68285) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 933.515450] env[68285]: DEBUG oslo_concurrency.lockutils [None req-705880d9-3e13-4f89-9d94-f9cbfbbe5089 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "324cc3e5-1c81-498e-b520-e9fca26013ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.336s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.529709] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891519, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505093} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.530057] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b/ec89a2a4-3bfc-45c5-b7f2-239b52995d6b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 933.530318] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 933.530596] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c27c7748-e298-44bd-acc2-fad4b120733b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.542845] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 933.542845] env[68285]: value = "task-2891520" [ 933.542845] env[68285]: _type = "Task" [ 933.542845] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.553353] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891520, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.670840] env[68285]: DEBUG nova.compute.manager [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 933.671166] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 933.671958] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c95b090-33dc-4cc3-b94c-eba79465c0c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.680894] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 933.681154] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05411db1-1182-49e3-9630-0ba2999bc908 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.691515] env[68285]: DEBUG oslo_vmware.api [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 933.691515] env[68285]: value = "task-2891521" [ 933.691515] env[68285]: _type = "Task" [ 933.691515] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.701289] env[68285]: DEBUG oslo_vmware.api [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891521, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.854064] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "refresh_cache-c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.854271] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquired lock "refresh_cache-c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.854388] env[68285]: DEBUG nova.network.neutron [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 933.927132] env[68285]: DEBUG nova.compute.manager [req-84bbc671-7bbc-48a0-b365-a21060cd3e1d req-3183d83a-164d-4f90-b375-6bdf7d632c7b service nova] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Received event network-vif-plugged-046ad230-e38b-43a3-ab69-77b20f182d76 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 933.927132] env[68285]: DEBUG oslo_concurrency.lockutils [req-84bbc671-7bbc-48a0-b365-a21060cd3e1d req-3183d83a-164d-4f90-b375-6bdf7d632c7b service nova] Acquiring lock "c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.928108] env[68285]: DEBUG oslo_concurrency.lockutils [req-84bbc671-7bbc-48a0-b365-a21060cd3e1d req-3183d83a-164d-4f90-b375-6bdf7d632c7b service nova] Lock "c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.928451] env[68285]: DEBUG oslo_concurrency.lockutils [req-84bbc671-7bbc-48a0-b365-a21060cd3e1d req-3183d83a-164d-4f90-b375-6bdf7d632c7b service nova] Lock "c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.929357] env[68285]: DEBUG nova.compute.manager [req-84bbc671-7bbc-48a0-b365-a21060cd3e1d req-3183d83a-164d-4f90-b375-6bdf7d632c7b service nova] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] No waiting events found dispatching network-vif-plugged-046ad230-e38b-43a3-ab69-77b20f182d76 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 933.929357] env[68285]: WARNING nova.compute.manager [req-84bbc671-7bbc-48a0-b365-a21060cd3e1d req-3183d83a-164d-4f90-b375-6bdf7d632c7b service nova] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Received unexpected event network-vif-plugged-046ad230-e38b-43a3-ab69-77b20f182d76 for instance with vm_state building and task_state spawning. 
[ 933.929537] env[68285]: DEBUG nova.compute.manager [req-84bbc671-7bbc-48a0-b365-a21060cd3e1d req-3183d83a-164d-4f90-b375-6bdf7d632c7b service nova] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Received event network-changed-046ad230-e38b-43a3-ab69-77b20f182d76 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 933.929868] env[68285]: DEBUG nova.compute.manager [req-84bbc671-7bbc-48a0-b365-a21060cd3e1d req-3183d83a-164d-4f90-b375-6bdf7d632c7b service nova] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Refreshing instance network info cache due to event network-changed-046ad230-e38b-43a3-ab69-77b20f182d76. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 933.930235] env[68285]: DEBUG oslo_concurrency.lockutils [req-84bbc671-7bbc-48a0-b365-a21060cd3e1d req-3183d83a-164d-4f90-b375-6bdf7d632c7b service nova] Acquiring lock "refresh_cache-c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.058990] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891520, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091504} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.059319] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 934.060925] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f86f78-4599-4fcd-b6f6-c2ff9a814f2c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.084969] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b/ec89a2a4-3bfc-45c5-b7f2-239b52995d6b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 934.088318] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b22447da-e097-40f4-8496-218e1097a44c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.110669] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 934.110669] env[68285]: value = "task-2891522" [ 934.110669] env[68285]: _type = "Task" [ 934.110669] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.129094] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891522, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.208052] env[68285]: DEBUG oslo_vmware.api [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891521, 'name': PowerOffVM_Task, 'duration_secs': 0.431854} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.212215] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 934.213310] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 934.213310] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d12c802-9166-437a-9148-e9373da45ce3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.271023] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "f1b8808d-c3a1-4be6-b6ec-ed441291e8f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.271236] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "f1b8808d-c3a1-4be6-b6ec-ed441291e8f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.285028] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 934.285028] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 934.285028] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleting the datastore file [datastore1] d1b5abfa-fd38-4d17-b75f-5036af841d24 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 934.285028] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c149fcad-2937-499f-bde5-cf4e6f9fc7a3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.295770] env[68285]: DEBUG oslo_vmware.api [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 934.295770] env[68285]: value = "task-2891524" [ 934.295770] env[68285]: _type = "Task" [ 934.295770] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.304566] env[68285]: DEBUG oslo_vmware.api [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891524, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.410847] env[68285]: DEBUG nova.network.neutron [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.574877] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Volume attach. 
Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 934.575059] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580913', 'volume_id': '9c9ab8e5-af3d-4467-aa6d-9969e7562dfb', 'name': 'volume-9c9ab8e5-af3d-4467-aa6d-9969e7562dfb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a1cc678-2bb2-403e-b6e8-afdeb8362eac', 'attached_at': '', 'detached_at': '', 'volume_id': '9c9ab8e5-af3d-4467-aa6d-9969e7562dfb', 'serial': '9c9ab8e5-af3d-4467-aa6d-9969e7562dfb'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 934.576020] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11e39c3-9643-4b47-8f2d-ac81cfd60346 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.605115] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d270a24-7397-40b8-bbdd-90e6d964b030 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.611827] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97586810-e301-46df-9b89-011cb4a6ced1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.643734] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] volume-9c9ab8e5-af3d-4467-aa6d-9969e7562dfb/volume-9c9ab8e5-af3d-4467-aa6d-9969e7562dfb.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 934.645102] env[68285]: DEBUG nova.network.neutron [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Updating instance_info_cache with network_info: [{"id": "046ad230-e38b-43a3-ab69-77b20f182d76", "address": "fa:16:3e:7a:44:23", "network": {"id": "d9bb1a73-a8c2-4023-87f2-76bdb79f714a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-108279850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb8e48ceae0748b0b8c762ab7303a4b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap046ad230-e3", "ovs_interfaceid": "046ad230-e38b-43a3-ab69-77b20f182d76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.648239] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38b9c357-71a0-498a-ade4-6d193db61bb6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.669701] env[68285]: DEBUG oslo_concurrency.lockutils [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.669980] env[68285]: DEBUG oslo_concurrency.lockutils [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.676023] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78815c86-107b-42f2-ad9b-0fcbeb2d9a9b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.683078] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891522, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.683516] env[68285]: DEBUG oslo_vmware.api [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 934.683516] env[68285]: value = "task-2891525" [ 934.683516] env[68285]: _type = "Task" [ 934.683516] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.716819] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2dba25f-2dcb-4c83-bea1-f01e219a7265 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.725241] env[68285]: DEBUG oslo_vmware.api [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891525, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.729620] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1907dd66-27a9-4301-b5eb-a91c322a08bf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.746484] env[68285]: DEBUG nova.compute.provider_tree [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.775386] env[68285]: DEBUG nova.compute.manager [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 934.807264] env[68285]: DEBUG oslo_vmware.api [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891524, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151118} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.807264] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 934.807597] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 934.807805] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 934.807996] env[68285]: INFO nova.compute.manager [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Took 1.14 seconds to destroy the instance on the hypervisor. [ 934.808253] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 934.808480] env[68285]: DEBUG nova.compute.manager [-] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 934.808581] env[68285]: DEBUG nova.network.neutron [-] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 934.954212] env[68285]: DEBUG nova.compute.manager [None req-8bc7101c-8269-4788-ad85-d611c0e29072 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 934.954983] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da46c82-17ca-43e0-8a4a-0bd13e78665c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.122788] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891522, 'name': ReconfigVM_Task, 'duration_secs': 0.602905} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.123125] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Reconfigured VM instance instance-0000000b to attach disk [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b/ec89a2a4-3bfc-45c5-b7f2-239b52995d6b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 935.124546] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21a0cf34-eb79-4bcd-9abd-479c49e3282f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.133649] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 935.133649] env[68285]: value = "task-2891526" [ 935.133649] env[68285]: _type = "Task" [ 935.133649] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.141179] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891526, 'name': Rename_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.167093] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Releasing lock "refresh_cache-c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.167428] env[68285]: DEBUG nova.compute.manager [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Instance network_info: |[{"id": "046ad230-e38b-43a3-ab69-77b20f182d76", "address": "fa:16:3e:7a:44:23", "network": {"id": "d9bb1a73-a8c2-4023-87f2-76bdb79f714a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-108279850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb8e48ceae0748b0b8c762ab7303a4b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap046ad230-e3", "ovs_interfaceid": "046ad230-e38b-43a3-ab69-77b20f182d76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 935.168815] env[68285]: DEBUG oslo_concurrency.lockutils [req-84bbc671-7bbc-48a0-b365-a21060cd3e1d req-3183d83a-164d-4f90-b375-6bdf7d632c7b service nova] Acquired lock "refresh_cache-c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 935.169025] env[68285]: DEBUG nova.network.neutron [req-84bbc671-7bbc-48a0-b365-a21060cd3e1d req-3183d83a-164d-4f90-b375-6bdf7d632c7b service nova] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Refreshing network info cache for port 046ad230-e38b-43a3-ab69-77b20f182d76 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 935.170219] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:44:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '713e54d5-283f-493d-b003-f13182deaf7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '046ad230-e38b-43a3-ab69-77b20f182d76', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 935.178578] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 
tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 935.179412] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 935.179662] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9bf44417-9188-43b4-9093-b822a8638167 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.197801] env[68285]: DEBUG nova.compute.utils [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 935.209627] env[68285]: DEBUG oslo_vmware.api [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891525, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.211337] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 935.211337] env[68285]: value = "task-2891527" [ 935.211337] env[68285]: _type = "Task" [ 935.211337] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.225120] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891527, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.248623] env[68285]: DEBUG nova.scheduler.client.report [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 935.306913] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.468488] env[68285]: INFO nova.compute.manager [None req-8bc7101c-8269-4788-ad85-d611c0e29072 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] instance snapshotting [ 935.469162] env[68285]: DEBUG nova.objects.instance [None req-8bc7101c-8269-4788-ad85-d611c0e29072 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Lazy-loading 'flavor' on Instance uuid c7ab28c3-a316-4685-b876-a0e7c657ec35 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.647091] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891526, 'name': Rename_Task, 'duration_secs': 0.218571} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.647488] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 935.647651] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86ce71fd-a3d9-4870-8442-1cfd5b7e95cd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.655427] env[68285]: DEBUG nova.network.neutron [-] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.656031] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 935.656031] env[68285]: value = "task-2891528" [ 935.656031] env[68285]: _type = "Task" [ 935.656031] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.664917] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891528, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.706769] env[68285]: DEBUG oslo_concurrency.lockutils [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.037s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.713710] env[68285]: DEBUG oslo_vmware.api [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891525, 'name': ReconfigVM_Task, 'duration_secs': 0.642225} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.717428] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Reconfigured VM instance instance-0000002f to attach disk [datastore1] volume-9c9ab8e5-af3d-4467-aa6d-9969e7562dfb/volume-9c9ab8e5-af3d-4467-aa6d-9969e7562dfb.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 935.728022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Acquiring lock "c7ab28c3-a316-4685-b876-a0e7c657ec35" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.728022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Lock "c7ab28c3-a316-4685-b876-a0e7c657ec35" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.728022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Acquiring lock "c7ab28c3-a316-4685-b876-a0e7c657ec35-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.728022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Lock "c7ab28c3-a316-4685-b876-a0e7c657ec35-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.728022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Lock "c7ab28c3-a316-4685-b876-a0e7c657ec35-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.728022] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5422ca5d-93cc-4662-8f2c-18a775937ea1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.739225] env[68285]: INFO nova.compute.manager [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Terminating instance [ 935.757918] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.799s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.759968] env[68285]: DEBUG nova.compute.manager [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 935.762445] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891527, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.764838] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.699s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.765079] env[68285]: DEBUG nova.objects.instance [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lazy-loading 'resources' on Instance uuid 12fad42a-1011-4563-b11f-7b141b2a1670 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.767961] env[68285]: DEBUG oslo_vmware.api [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 935.767961] env[68285]: value = "task-2891529" [ 935.767961] env[68285]: _type = "Task" [ 935.767961] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.780074] env[68285]: DEBUG oslo_vmware.api [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891529, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.979461] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0332fab7-875e-4f13-9f19-646c0ba8be46 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.997235] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4525475a-81a0-40b5-a6e4-9c5dfad56879 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.020807] env[68285]: DEBUG nova.network.neutron [req-84bbc671-7bbc-48a0-b365-a21060cd3e1d req-3183d83a-164d-4f90-b375-6bdf7d632c7b service nova] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Updated VIF entry in instance network info cache for port 046ad230-e38b-43a3-ab69-77b20f182d76. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 936.021431] env[68285]: DEBUG nova.network.neutron [req-84bbc671-7bbc-48a0-b365-a21060cd3e1d req-3183d83a-164d-4f90-b375-6bdf7d632c7b service nova] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Updating instance_info_cache with network_info: [{"id": "046ad230-e38b-43a3-ab69-77b20f182d76", "address": "fa:16:3e:7a:44:23", "network": {"id": "d9bb1a73-a8c2-4023-87f2-76bdb79f714a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-108279850-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb8e48ceae0748b0b8c762ab7303a4b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "713e54d5-283f-493d-b003-f13182deaf7b", "external-id": "cl2-zone-703", "segmentation_id": 703, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap046ad230-e3", "ovs_interfaceid": "046ad230-e38b-43a3-ab69-77b20f182d76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.160397] env[68285]: INFO nova.compute.manager [-] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Took 1.35 seconds to deallocate network for instance. [ 936.169977] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891528, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.225837] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891527, 'name': CreateVM_Task, 'duration_secs': 0.584502} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.225837] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 936.225837] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.225837] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.225837] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 936.225837] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7a43c77-7f5b-49f0-9188-276c046b799c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.234820] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 936.234820] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5250d5b2-f367-441d-2c27-f0a22fabb7b5" [ 936.234820] env[68285]: _type = "Task" [ 936.234820] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.243914] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5250d5b2-f367-441d-2c27-f0a22fabb7b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.248432] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Acquiring lock "refresh_cache-c7ab28c3-a316-4685-b876-a0e7c657ec35" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.248432] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Acquired lock "refresh_cache-c7ab28c3-a316-4685-b876-a0e7c657ec35" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.248432] env[68285]: DEBUG nova.network.neutron [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 936.272636] env[68285]: DEBUG nova.compute.utils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 936.279054] env[68285]: DEBUG nova.compute.manager [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 936.279054] env[68285]: DEBUG nova.network.neutron [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 936.289315] env[68285]: DEBUG oslo_vmware.api [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891529, 'name': ReconfigVM_Task, 'duration_secs': 0.201468} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.290213] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580913', 'volume_id': '9c9ab8e5-af3d-4467-aa6d-9969e7562dfb', 'name': 'volume-9c9ab8e5-af3d-4467-aa6d-9969e7562dfb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a1cc678-2bb2-403e-b6e8-afdeb8362eac', 'attached_at': '', 'detached_at': '', 'volume_id': '9c9ab8e5-af3d-4467-aa6d-9969e7562dfb', 'serial': '9c9ab8e5-af3d-4467-aa6d-9969e7562dfb'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 936.338561] env[68285]: DEBUG nova.policy [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e71174a58844a39ad622581c1503028', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae4430b997b4480abbf2c5fce71cca04', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 936.507900] env[68285]: DEBUG nova.compute.manager [None req-8bc7101c-8269-4788-ad85-d611c0e29072 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Instance disappeared during snapshot {{(pid=68285) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 936.525320] env[68285]: DEBUG oslo_concurrency.lockutils [req-84bbc671-7bbc-48a0-b365-a21060cd3e1d req-3183d83a-164d-4f90-b375-6bdf7d632c7b service nova] Releasing lock "refresh_cache-c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.591353] env[68285]: DEBUG nova.compute.manager [req-4fbf98bc-0ec3-453c-a6d1-be8566e2882e req-af0281f8-c46b-4530-a060-71510250c8e3 service nova] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Received event network-vif-deleted-001d3974-db8e-494c-b536-d7415394a0e5 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 936.643588] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "32d23c62-23ec-4732-a95d-6ac32805e1b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.644272] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "32d23c62-23ec-4732-a95d-6ac32805e1b9" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.677396] env[68285]: DEBUG oslo_concurrency.lockutils [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.679840] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52587eb3-3ae6-53a2-98e6-923fe9cd0bd1/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 936.685405] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1000106-0e61-4c0a-8da9-eb5669a6015a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.691227] env[68285]: DEBUG oslo_vmware.api [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891528, 'name': PowerOnVM_Task, 'duration_secs': 0.753056} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.693027] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "9569d50c-d358-4cc5-a106-32da785e4765" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.693027] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "9569d50c-d358-4cc5-a106-32da785e4765" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.693389] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 936.693389] env[68285]: DEBUG nova.compute.manager [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 936.698116] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151af437-72aa-45e0-97eb-697326dba8ea {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.704398] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52587eb3-3ae6-53a2-98e6-923fe9cd0bd1/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 936.704611] env[68285]: ERROR oslo_vmware.rw_handles [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52587eb3-3ae6-53a2-98e6-923fe9cd0bd1/disk-0.vmdk due to incomplete transfer. [ 936.705286] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b9f8f84e-1f30-4b15-85c3-ad8644e57e7a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.719765] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52587eb3-3ae6-53a2-98e6-923fe9cd0bd1/disk-0.vmdk. {{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 936.720088] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Uploaded image 99446637-e427-4a35-9dd2-98fd5edf8d53 to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 936.722124] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 936.727859] env[68285]: DEBUG nova.compute.manager [None req-8bc7101c-8269-4788-ad85-d611c0e29072 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Found 0 images (rotation: 2) {{(pid=68285) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 936.728869] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ccc71ea4-ab2f-46cf-b779-7ea960cefefd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.732724] env[68285]: DEBUG oslo_concurrency.lockutils [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.732724] env[68285]: DEBUG oslo_concurrency.lockutils [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock 
"5e101d74-7a82-4118-8f4c-7af9a6b0917a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.733142] env[68285]: INFO nova.compute.manager [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Attaching volume f568d87f-424d-4432-b017-6f7542b87545 to /dev/sdb [ 936.735116] env[68285]: DEBUG nova.network.neutron [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Successfully created port: 5728aafe-c57f-4eb7-a866-ad586f491645 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 936.741641] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 936.741641] env[68285]: value = "task-2891530" [ 936.741641] env[68285]: _type = "Task" [ 936.741641] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.760508] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5250d5b2-f367-441d-2c27-f0a22fabb7b5, 'name': SearchDatastore_Task, 'duration_secs': 0.01113} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.761987] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.762251] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 936.765017] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.765017] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.765017] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 936.766608] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90daa1b6-82de-4dae-a877-6763f68e8204 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.769039] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891530, 'name': Destroy_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.777197] env[68285]: DEBUG nova.compute.manager [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 936.781740] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 936.781872] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 936.783202] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1380d04-7324-40d9-bc2e-d44c45b0da74 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.786962] env[68285]: DEBUG nova.network.neutron [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 936.788649] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7abad9f5-15c5-4539-9b16-b07aa687a9a3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.798189] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc2e249-1f17-40ba-b20f-3f58d4da1e2d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.801719] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 936.801719] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52a016ca-5078-e352-9830-75d81ff17ffb" [ 936.801719] env[68285]: _type = "Task" [ 936.801719] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.810440] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a016ca-5078-e352-9830-75d81ff17ffb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.815881] env[68285]: DEBUG nova.virt.block_device [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Updating existing volume attachment record: 2b722277-15c1-4097-ad21-48354fed532b {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 936.889391] env[68285]: DEBUG nova.network.neutron [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.948717] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad8338e-5765-4a9f-a323-1912f2b229b1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.956678] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ed5c98-4b23-4f6f-9eec-da6eee85ebb3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.988988] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bc0c8f-57fa-455a-94af-fd3ce32daf09 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.996668] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae1fe70-68fb-4b3b-ab97-95c0f814695c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.010255] env[68285]: DEBUG nova.compute.provider_tree 
[None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.089234] env[68285]: DEBUG nova.network.neutron [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Successfully created port: 50c33bdf-4dea-4c86-9423-a50e9db0b741 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 937.150998] env[68285]: DEBUG nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 937.194522] env[68285]: DEBUG nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 937.221573] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.258368] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891530, 'name': Destroy_Task} progress is 33%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.316984] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a016ca-5078-e352-9830-75d81ff17ffb, 'name': SearchDatastore_Task, 'duration_secs': 0.009439} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.317858] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c84a2a32-288e-4b9d-a23a-864ab22a1dd2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.324914] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 937.324914] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c328d6-e08b-ed33-a986-9363eccfbba3" [ 937.324914] env[68285]: _type = "Task" [ 937.324914] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.336157] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c328d6-e08b-ed33-a986-9363eccfbba3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.349510] env[68285]: DEBUG nova.objects.instance [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lazy-loading 'flavor' on Instance uuid 2a1cc678-2bb2-403e-b6e8-afdeb8362eac {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 937.394948] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Releasing lock "refresh_cache-c7ab28c3-a316-4685-b876-a0e7c657ec35" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.395579] env[68285]: DEBUG nova.compute.manager [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 937.395923] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 937.397085] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25efbef-364e-4ae1-9781-21292fbd21bb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.405936] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 937.406139] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc96d87d-b537-4955-b219-1f1303c01a0b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.413255] env[68285]: DEBUG oslo_vmware.api [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Waiting for the task: (returnval){ [ 937.413255] env[68285]: value = "task-2891534" [ 937.413255] env[68285]: _type = "Task" [ 937.413255] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.421564] env[68285]: DEBUG oslo_vmware.api [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891534, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.512869] env[68285]: DEBUG nova.scheduler.client.report [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 937.672235] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.719084] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.755531] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891530, 'name': Destroy_Task, 'duration_secs': 0.823232} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.758303] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Destroyed the VM [ 937.758583] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 937.759202] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "64103f25-6411-44be-a60f-b9c276dba331" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.759418] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "64103f25-6411-44be-a60f-b9c276dba331" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.759622] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0724d58f-5781-4b16-bbfa-4162a869f215 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.767765] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 937.767765] env[68285]: value = "task-2891535" [ 937.767765] env[68285]: _type = "Task" [ 937.767765] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.776377] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891535, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.794265] env[68285]: DEBUG nova.compute.manager [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 937.817795] env[68285]: DEBUG nova.virt.hardware [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 937.818114] env[68285]: DEBUG nova.virt.hardware [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 937.818328] env[68285]: DEBUG nova.virt.hardware [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 937.818550] env[68285]: DEBUG nova.virt.hardware [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 937.818745] env[68285]: DEBUG nova.virt.hardware [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 937.818927] env[68285]: DEBUG nova.virt.hardware [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 937.819189] env[68285]: DEBUG nova.virt.hardware [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 937.819361] env[68285]: DEBUG nova.virt.hardware [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 937.819573] env[68285]: DEBUG nova.virt.hardware [None req-a66842d8-1547-4427-8b5d-c03891306140 
tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 937.819801] env[68285]: DEBUG nova.virt.hardware [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 937.820022] env[68285]: DEBUG nova.virt.hardware [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 937.820946] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4187c07a-9872-4c45-af43-b4631a19c7af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.833116] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33fcc9c-bf78-44ca-9d54-264b12cc0189 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.840295] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c328d6-e08b-ed33-a986-9363eccfbba3, 'name': SearchDatastore_Task, 'duration_secs': 0.019265} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.840906] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.841211] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e/c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 937.841487] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69f25416-0469-43d0-bab8-0e294e4921da {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.854886] env[68285]: DEBUG oslo_concurrency.lockutils [None req-935e72d1-49cf-44fc-b74a-07a25fc09bbf tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.903s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.856957] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 937.856957] env[68285]: value = "task-2891536" [ 937.856957] env[68285]: _type = "Task" [ 937.856957] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.865820] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891536, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.922771] env[68285]: DEBUG oslo_vmware.api [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891534, 'name': PowerOffVM_Task, 'duration_secs': 0.181943} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.923048] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 937.923214] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 937.924667] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83793820-fcc9-4831-a769-cb425c203464 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.950769] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 937.951033] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 937.951253] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Deleting the datastore file [datastore1] c7ab28c3-a316-4685-b876-a0e7c657ec35 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.951546] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b589c76b-e517-4123-b357-23b306a4be7d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.958784] env[68285]: DEBUG oslo_vmware.api [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Waiting for the task: (returnval){ [ 937.958784] env[68285]: value = "task-2891538" [ 937.958784] env[68285]: _type = "Task" [ 937.958784] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.967130] env[68285]: DEBUG oslo_vmware.api [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891538, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.018655] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.254s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.022915] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.903s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.023198] env[68285]: DEBUG nova.objects.instance [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Lazy-loading 'resources' on Instance uuid 631fe0ee-73a6-48c5-9a14-f6a00d2c2942 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.064808] env[68285]: INFO nova.scheduler.client.report [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Deleted allocations for instance 12fad42a-1011-4563-b11f-7b141b2a1670 [ 938.285977] env[68285]: DEBUG oslo_vmware.api [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891535, 'name': RemoveSnapshot_Task, 'duration_secs': 0.464253} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.289231] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 938.289516] env[68285]: INFO nova.compute.manager [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Took 17.46 seconds to snapshot the instance on the hypervisor. [ 938.373367] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891536, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512259} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.373723] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e/c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 938.373975] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 938.374486] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-443d54bc-7edc-44e6-b07d-251db88bf7c7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.383199] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 938.383199] env[68285]: value = "task-2891539" [ 938.383199] env[68285]: _type = "Task" [ 938.383199] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.390727] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891539, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.470860] env[68285]: DEBUG oslo_vmware.api [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Task: {'id': task-2891538, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.400473} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.471127] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 938.471306] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 938.471480] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 938.471647] env[68285]: INFO nova.compute.manager [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Took 1.08 seconds to destroy the instance on the hypervisor. [ 938.471885] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 938.472106] env[68285]: DEBUG nova.compute.manager [-] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 938.472187] env[68285]: DEBUG nova.network.neutron [-] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 938.488391] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "5266817c-ce3b-4c96-a3bd-32b631c29b81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.488644] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "5266817c-ce3b-4c96-a3bd-32b631c29b81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.488860] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "5266817c-ce3b-4c96-a3bd-32b631c29b81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.489498] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "5266817c-ce3b-4c96-a3bd-32b631c29b81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.489698] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "5266817c-ce3b-4c96-a3bd-32b631c29b81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.491608] env[68285]: INFO nova.compute.manager [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Terminating instance [ 938.498561] env[68285]: DEBUG nova.network.neutron [-] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.541481] env[68285]: INFO nova.compute.manager [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Rebuilding instance [ 938.572582] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a408ea5-80c2-4171-8aa5-753e89e8c229 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "12fad42a-1011-4563-b11f-7b141b2a1670" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.016s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.579877] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.580156] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.580362] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.580542] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.580709] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.583117] env[68285]: INFO nova.compute.manager [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Terminating instance [ 938.590575] env[68285]: DEBUG nova.compute.manager [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 
tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 938.591596] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0cc2f9-2175-440f-94ad-8b06ba25b7f1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.622306] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d0a6bd-f5c8-4ed5-bfa9-fdfc3e22c306 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.631361] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e70ed6-35c1-4e43-af70-915058da62f9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.663202] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0b29e8-101b-44e6-970e-da27587f7556 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.668127] env[68285]: DEBUG nova.network.neutron [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Successfully updated port: 5728aafe-c57f-4eb7-a866-ad586f491645 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 938.673224] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b405aa-b58b-4938-8d4f-367ae1f2ade6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.688455] env[68285]: DEBUG nova.compute.provider_tree [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.795689] env[68285]: DEBUG nova.compute.manager [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Instance disappeared during snapshot {{(pid=68285) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 938.808714] env[68285]: DEBUG nova.compute.manager [req-d7736cf8-3046-4cf8-93cf-a503ddb8ba86 req-35df85a4-0de5-4317-9cbf-8060196cd358 service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Received event network-vif-plugged-5728aafe-c57f-4eb7-a866-ad586f491645 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 938.808714] env[68285]: DEBUG oslo_concurrency.lockutils [req-d7736cf8-3046-4cf8-93cf-a503ddb8ba86 req-35df85a4-0de5-4317-9cbf-8060196cd358 service nova] Acquiring lock "8b473550-4a40-48a5-9e1c-7c48df828e61-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.808714] env[68285]: DEBUG oslo_concurrency.lockutils [req-d7736cf8-3046-4cf8-93cf-a503ddb8ba86 req-35df85a4-0de5-4317-9cbf-8060196cd358 
service nova] Lock "8b473550-4a40-48a5-9e1c-7c48df828e61-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.808912] env[68285]: DEBUG oslo_concurrency.lockutils [req-d7736cf8-3046-4cf8-93cf-a503ddb8ba86 req-35df85a4-0de5-4317-9cbf-8060196cd358 service nova] Lock "8b473550-4a40-48a5-9e1c-7c48df828e61-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.809955] env[68285]: DEBUG nova.compute.manager [req-d7736cf8-3046-4cf8-93cf-a503ddb8ba86 req-35df85a4-0de5-4317-9cbf-8060196cd358 service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] No waiting events found dispatching network-vif-plugged-5728aafe-c57f-4eb7-a866-ad586f491645 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 938.810227] env[68285]: WARNING nova.compute.manager [req-d7736cf8-3046-4cf8-93cf-a503ddb8ba86 req-35df85a4-0de5-4317-9cbf-8060196cd358 service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Received unexpected event network-vif-plugged-5728aafe-c57f-4eb7-a866-ad586f491645 for instance with vm_state building and task_state spawning. [ 938.812453] env[68285]: DEBUG nova.compute.manager [None req-d5512c96-ed16-4726-91c0-47d9afc70767 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Image not found during clean up 99446637-e427-4a35-9dd2-98fd5edf8d53 {{(pid=68285) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 938.892837] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891539, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066174} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.892837] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 938.893425] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2311f757-467b-4945-8d04-c07fa2bf1da2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.916761] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e/c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 938.917061] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6009495b-35d6-43a3-8ca8-db87a7da31a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.941072] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 938.941072] env[68285]: value = "task-2891540" [ 938.941072] env[68285]: _type = "Task" [ 938.941072] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.948337] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891540, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.004025] env[68285]: DEBUG nova.network.neutron [-] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.005131] env[68285]: DEBUG nova.compute.manager [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 939.005355] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 939.006248] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41f1802-7a3c-4afa-b0a7-00d645284255 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.014908] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 939.015742] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2450c7b3-52f3-4622-9822-dcfb6cbfa1bd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.076514] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 939.076836] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 939.077093] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleting the datastore file [datastore1] 5266817c-ce3b-4c96-a3bd-32b631c29b81 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 939.077466] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d69c419-abf6-4a0c-b471-09973e4204e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.083409] env[68285]: DEBUG oslo_vmware.api [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 939.083409] env[68285]: value = "task-2891542" [ 939.083409] env[68285]: _type = "Task" [ 939.083409] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.092033] env[68285]: DEBUG nova.compute.manager [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 939.092312] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.092553] env[68285]: DEBUG oslo_vmware.api [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891542, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.092768] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17ec0e8a-9908-406f-9b3a-8e893c819455 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.097944] env[68285]: DEBUG oslo_vmware.api [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 939.097944] env[68285]: value = "task-2891543" [ 939.097944] env[68285]: _type = "Task" [ 939.097944] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.106786] env[68285]: DEBUG oslo_vmware.api [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891543, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.191730] env[68285]: DEBUG nova.scheduler.client.report [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 939.317716] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "3c71f649-b456-45a0-a113-725a529702a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.317966] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "3c71f649-b456-45a0-a113-725a529702a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.450251] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891540, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.511432] env[68285]: INFO nova.compute.manager [-] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Took 1.04 seconds to deallocate network for instance. [ 939.593360] env[68285]: DEBUG oslo_vmware.api [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891542, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198741} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.593554] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 939.593830] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 939.594021] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 939.594196] env[68285]: INFO nova.compute.manager [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Took 0.59 seconds to destroy the instance on the hypervisor. [ 939.594434] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 939.594614] env[68285]: DEBUG nova.compute.manager [-] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 939.594708] env[68285]: DEBUG nova.network.neutron [-] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 939.606527] env[68285]: DEBUG oslo_vmware.api [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891543, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.608517] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.608517] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2bbf292f-8974-4d25-b176-e9ea490aa615 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.615209] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 939.615209] env[68285]: value = "task-2891544" [ 939.615209] env[68285]: _type = "Task" [ 939.615209] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.623494] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891544, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.699552] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.675s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.700207] env[68285]: DEBUG oslo_concurrency.lockutils [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.909s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.700452] env[68285]: DEBUG nova.objects.instance [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Lazy-loading 'resources' on Instance uuid 5b58896c-cb07-48c8-ace0-385486a3e19d {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 939.722636] env[68285]: INFO nova.scheduler.client.report [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Deleted allocations for instance 631fe0ee-73a6-48c5-9a14-f6a00d2c2942 [ 939.952768] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891540, 'name': ReconfigVM_Task, 'duration_secs': 1.000567} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.953083] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Reconfigured VM instance instance-00000032 to attach disk [datastore1] c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e/c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 939.953689] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb24e7a1-8cc8-490a-becd-2b1e61f6fed7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.961017] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 939.961017] env[68285]: value = "task-2891545" [ 939.961017] env[68285]: _type = "Task" [ 939.961017] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.967597] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891545, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.018639] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.108644] env[68285]: DEBUG oslo_vmware.api [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891543, 'name': PowerOffVM_Task, 'duration_secs': 0.698584} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.108644] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 940.108813] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Volume detach. 
Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 940.108992] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580913', 'volume_id': '9c9ab8e5-af3d-4467-aa6d-9969e7562dfb', 'name': 'volume-9c9ab8e5-af3d-4467-aa6d-9969e7562dfb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a1cc678-2bb2-403e-b6e8-afdeb8362eac', 'attached_at': '', 'detached_at': '', 'volume_id': '9c9ab8e5-af3d-4467-aa6d-9969e7562dfb', 'serial': '9c9ab8e5-af3d-4467-aa6d-9969e7562dfb'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 940.109754] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be18d4e-6009-4f4c-856b-7a0e91b334f6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.134233] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc2d2ce-4904-455a-a686-e24ded2006e2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.142031] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891544, 'name': PowerOffVM_Task, 'duration_secs': 0.196162} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.143743] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 940.143980] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 940.144924] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0033645a-804d-498b-ac3d-f32435207090 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.147913] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a203bb6b-def6-4110-8ac5-47a0ae00614c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.154661] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 940.169620] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da2d2658-8d01-454c-ae64-148e052201ac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.171961] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10df5c59-d4e4-4e59-82e4-c41c85a8b0d7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.186760] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] The volume has not been displaced from its original location: [datastore1] volume-9c9ab8e5-af3d-4467-aa6d-9969e7562dfb/volume-9c9ab8e5-af3d-4467-aa6d-9969e7562dfb.vmdk. No consolidation needed. 
{{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 940.192025] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Reconfiguring VM instance instance-0000002f to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 940.192273] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-236de170-d7fd-435f-a272-3600b9e879de {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.211983] env[68285]: DEBUG oslo_vmware.api [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 940.211983] env[68285]: value = "task-2891547" [ 940.211983] env[68285]: _type = "Task" [ 940.211983] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.220491] env[68285]: DEBUG oslo_vmware.api [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891547, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.234831] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2069a59d-ad15-46aa-84da-d768045200d8 tempest-ServerTagsTestJSON-1175634675 tempest-ServerTagsTestJSON-1175634675-project-member] Lock "631fe0ee-73a6-48c5-9a14-f6a00d2c2942" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.196s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.250981] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 940.251247] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 940.251453] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleting the datastore file [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 940.252302] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21b0c193-9bab-41b4-a86d-13bf5a623344 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.258710] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 
tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 940.258710] env[68285]: value = "task-2891548" [ 940.258710] env[68285]: _type = "Task" [ 940.258710] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.271357] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891548, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.346109] env[68285]: DEBUG nova.network.neutron [-] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.471827] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891545, 'name': Rename_Task, 'duration_secs': 0.135766} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.474758] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 940.475580] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-775bb6f6-a677-4c20-bfaf-09ea56bfed38 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.482455] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 940.482455] env[68285]: value = "task-2891549" [ 940.482455] env[68285]: _type = "Task" [ 940.482455] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.494787] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891549, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.723312] env[68285]: DEBUG oslo_vmware.api [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891547, 'name': ReconfigVM_Task, 'duration_secs': 0.371123} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.723599] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Reconfigured VM instance instance-0000002f to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 940.729394] env[68285]: DEBUG nova.network.neutron [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Successfully updated port: 50c33bdf-4dea-4c86-9423-a50e9db0b741 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 940.730718] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33aab27b-be0a-40b9-a31d-7ee56c829ed8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.749108] env[68285]: DEBUG oslo_vmware.api [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 940.749108] env[68285]: value = "task-2891550" [ 940.749108] env[68285]: _type = "Task" [ 940.749108] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.758248] env[68285]: DEBUG oslo_vmware.api [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891550, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.759844] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4811c011-e3dd-489c-9d42-1d969acca91a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.771988] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97bdf162-ddb9-45dd-a3e4-6e76f2a293c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.775057] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891548, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138729} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.775305] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 940.775485] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 940.775664] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 940.805646] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239c9ec0-e811-4e8e-908a-cfa0b67e7b45 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.813727] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d7a393-bd58-431e-809b-0e44e17a43a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.830241] env[68285]: DEBUG nova.compute.provider_tree [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.841547] env[68285]: DEBUG nova.compute.manager [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Received event network-changed-5728aafe-c57f-4eb7-a866-ad586f491645 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 940.841547] env[68285]: DEBUG nova.compute.manager [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Refreshing instance network info cache due to event network-changed-5728aafe-c57f-4eb7-a866-ad586f491645. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 940.841547] env[68285]: DEBUG oslo_concurrency.lockutils [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] Acquiring lock "refresh_cache-8b473550-4a40-48a5-9e1c-7c48df828e61" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.841547] env[68285]: DEBUG oslo_concurrency.lockutils [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] Acquired lock "refresh_cache-8b473550-4a40-48a5-9e1c-7c48df828e61" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.841547] env[68285]: DEBUG nova.network.neutron [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Refreshing network info cache for port 5728aafe-c57f-4eb7-a866-ad586f491645 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 940.853058] env[68285]: INFO nova.compute.manager [-] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Took 1.26 seconds to deallocate network for instance. [ 940.993698] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891549, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.241427] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "refresh_cache-8b473550-4a40-48a5-9e1c-7c48df828e61" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.259379] env[68285]: DEBUG oslo_vmware.api [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891550, 'name': ReconfigVM_Task, 'duration_secs': 0.248813} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.259727] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580913', 'volume_id': '9c9ab8e5-af3d-4467-aa6d-9969e7562dfb', 'name': 'volume-9c9ab8e5-af3d-4467-aa6d-9969e7562dfb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a1cc678-2bb2-403e-b6e8-afdeb8362eac', 'attached_at': '', 'detached_at': '', 'volume_id': '9c9ab8e5-af3d-4467-aa6d-9969e7562dfb', 'serial': '9c9ab8e5-af3d-4467-aa6d-9969e7562dfb'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 941.260039] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.260791] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5201297a-b18e-4000-93bb-825af6111c4e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.268158] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.268394] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f8903b7-11ca-45c8-bf24-42ca3f3ab12b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.333774] env[68285]: DEBUG nova.scheduler.client.report [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 941.339102] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 941.339332] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 
2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 941.339563] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleting the datastore file [datastore2] 2a1cc678-2bb2-403e-b6e8-afdeb8362eac {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.339918] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a6af0f5-2948-41e6-8012-c9e0e84533a1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.347107] env[68285]: DEBUG oslo_vmware.api [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 941.347107] env[68285]: value = "task-2891553" [ 941.347107] env[68285]: _type = "Task" [ 941.347107] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.357032] env[68285]: DEBUG oslo_vmware.api [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891553, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.357975] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.377604] env[68285]: DEBUG nova.network.neutron [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 941.470696] env[68285]: DEBUG nova.network.neutron [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.498156] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891549, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.828164] env[68285]: DEBUG nova.virt.hardware [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 941.828516] env[68285]: DEBUG nova.virt.hardware [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 941.828758] env[68285]: DEBUG nova.virt.hardware [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 941.829069] env[68285]: DEBUG nova.virt.hardware [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 941.829289] env[68285]: DEBUG nova.virt.hardware [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 941.829756] env[68285]: DEBUG nova.virt.hardware [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 941.830066] env[68285]: DEBUG nova.virt.hardware [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 941.830282] env[68285]: DEBUG nova.virt.hardware [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 941.830536] env[68285]: DEBUG nova.virt.hardware [None 
req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 941.830780] env[68285]: DEBUG nova.virt.hardware [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 941.831037] env[68285]: DEBUG nova.virt.hardware [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 941.831981] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b0dead-9edf-4169-99d0-fe5d61b3de15 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.839759] env[68285]: DEBUG oslo_concurrency.lockutils [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.140s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.843831] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.427s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.843831] env[68285]: INFO nova.compute.claims [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 941.847884] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4ef3a9-2f72-41b1-bbbb-c68e4b004adf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.875314] env[68285]: DEBUG oslo_vmware.api [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891553, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152076} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.884548] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:d3:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c317408-dd23-42c9-a837-c59782c5654a', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 941.885196] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 941.886213] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.886213] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 941.886213] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 941.886213] env[68285]: INFO nova.compute.manager [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Took 2.79 seconds to destroy the instance on the hypervisor. [ 941.888389] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 941.888389] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 941.888389] env[68285]: DEBUG nova.compute.manager [-] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 941.888389] env[68285]: DEBUG nova.network.neutron [-] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 941.889311] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf1ab416-1892-44d2-bb5d-d90f9b170c6c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.913571] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 941.913571] env[68285]: value = "task-2891554" [ 941.913571] env[68285]: _type = "Task" [ 941.913571] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.920464] env[68285]: INFO nova.scheduler.client.report [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Deleted allocations for instance 5b58896c-cb07-48c8-ace0-385486a3e19d [ 941.926478] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891554, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.972297] env[68285]: DEBUG oslo_concurrency.lockutils [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] Releasing lock "refresh_cache-8b473550-4a40-48a5-9e1c-7c48df828e61" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.973398] env[68285]: DEBUG nova.compute.manager [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Received event network-vif-deleted-0a2792b7-2c8a-4e2e-b434-f8073b771b2d {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 941.973898] env[68285]: DEBUG nova.compute.manager [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Received event network-vif-plugged-50c33bdf-4dea-4c86-9423-a50e9db0b741 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 941.973898] env[68285]: DEBUG oslo_concurrency.lockutils [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] Acquiring lock "8b473550-4a40-48a5-9e1c-7c48df828e61-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.974719] env[68285]: DEBUG oslo_concurrency.lockutils [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] Lock "8b473550-4a40-48a5-9e1c-7c48df828e61-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.975256] env[68285]: DEBUG oslo_concurrency.lockutils [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] Lock "8b473550-4a40-48a5-9e1c-7c48df828e61-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.978016] env[68285]: DEBUG nova.compute.manager [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] No waiting events found dispatching network-vif-plugged-50c33bdf-4dea-4c86-9423-a50e9db0b741 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 941.978016] env[68285]: WARNING nova.compute.manager [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Received unexpected event network-vif-plugged-50c33bdf-4dea-4c86-9423-a50e9db0b741 for instance with vm_state building and task_state spawning. [ 941.978016] env[68285]: DEBUG nova.compute.manager [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Received event network-changed-50c33bdf-4dea-4c86-9423-a50e9db0b741 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 941.978016] env[68285]: DEBUG nova.compute.manager [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Refreshing instance network info cache due to event network-changed-50c33bdf-4dea-4c86-9423-a50e9db0b741. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 941.978016] env[68285]: DEBUG oslo_concurrency.lockutils [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] Acquiring lock "refresh_cache-8b473550-4a40-48a5-9e1c-7c48df828e61" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.978016] env[68285]: DEBUG oslo_concurrency.lockutils [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] Acquired lock "refresh_cache-8b473550-4a40-48a5-9e1c-7c48df828e61" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 941.978016] env[68285]: DEBUG nova.network.neutron [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Refreshing network info cache for port 50c33bdf-4dea-4c86-9423-a50e9db0b741 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 941.997070] env[68285]: DEBUG oslo_vmware.api [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891549, 'name': PowerOnVM_Task, 'duration_secs': 1.096535} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.997070] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 941.997484] env[68285]: INFO nova.compute.manager [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Took 9.36 seconds to spawn the instance on the hypervisor. [ 941.997484] env[68285]: DEBUG nova.compute.manager [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 941.999025] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e74742-0fad-458e-b4b6-09f08143a1cf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.428831] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891554, 'name': CreateVM_Task, 'duration_secs': 0.494377} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.429090] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 942.429835] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.430297] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.430682] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 942.430971] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73e5eeed-e7d4-4178-9268-743ed3a9678f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.437472] env[68285]: DEBUG oslo_concurrency.lockutils [None req-83b22e0c-5cc6-46b7-a64c-834013e4cc1d tempest-VolumesAssistedSnapshotsTest-188827192 
tempest-VolumesAssistedSnapshotsTest-188827192-project-member] Lock "5b58896c-cb07-48c8-ace0-385486a3e19d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.327s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.442131] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 942.442131] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ed392c-3e5f-1006-110b-11dc40223a35" [ 942.442131] env[68285]: _type = "Task" [ 942.442131] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.452011] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ed392c-3e5f-1006-110b-11dc40223a35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.518048] env[68285]: INFO nova.compute.manager [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Took 43.37 seconds to build instance. [ 942.524617] env[68285]: DEBUG nova.network.neutron [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 942.616198] env[68285]: DEBUG nova.network.neutron [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.754044] env[68285]: DEBUG nova.network.neutron [-] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.920187] env[68285]: DEBUG nova.compute.manager [req-48372bd5-a845-4110-aadd-0ef92bbbd277 req-763af2df-b396-4d70-aef5-f52b435787d0 service nova] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Received event network-vif-deleted-9e7d680e-05f0-476c-b513-752cb59b861e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 942.955449] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ed392c-3e5f-1006-110b-11dc40223a35, 'name': SearchDatastore_Task, 'duration_secs': 0.016658} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.955449] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.955972] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.956904] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.956904] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.956904] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.957233] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8dde8ea9-5c5d-4575-91b5-89f8d915587e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.963716] env[68285]: DEBUG nova.compute.manager [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 942.964590] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5d7618-3d61-49d8-8cc6-3e7258724342 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.974728] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.974916] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 942.977390] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b85d588-8ddf-4c9a-86fa-7a3a2b98c80b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.986849] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 942.986849] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ea0566-7ccf-819d-a2b5-80f3efdf49c8" [ 942.986849] env[68285]: _type = "Task" [ 942.986849] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.004058] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ea0566-7ccf-819d-a2b5-80f3efdf49c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.023281] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66c6826-732e-46b7-833c-abb2bcc1d6b4 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.442s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.119207] env[68285]: DEBUG oslo_concurrency.lockutils [req-642526fb-0a2f-4991-bf68-dd8a32592edf req-2d8a0a88-db12-4a77-90ae-eda216d5160d service nova] Releasing lock "refresh_cache-8b473550-4a40-48a5-9e1c-7c48df828e61" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.119610] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquired lock "refresh_cache-8b473550-4a40-48a5-9e1c-7c48df828e61" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.121287] env[68285]: DEBUG nova.network.neutron [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.258527] env[68285]: INFO nova.compute.manager [-] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Took 1.37 seconds to deallocate network for instance. 
[ 943.454375] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2d94a5-055c-49db-800c-b2e005bd8d30 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.463916] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a166fea4-e11c-4ff5-84f1-8c806a0b743d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.498338] env[68285]: INFO nova.compute.manager [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] instance snapshotting [ 943.498994] env[68285]: DEBUG nova.objects.instance [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'flavor' on Instance uuid 34aeba05-804e-444c-8e58-69c7721b10b1 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 943.504735] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19baa18-d2f0-492a-b172-3618b9efa4fa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.521340] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f536763b-44cf-4648-b75d-a0242c5a1f23 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.522884] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ea0566-7ccf-819d-a2b5-80f3efdf49c8, 'name': SearchDatastore_Task, 'duration_secs': 0.027587} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.524653] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ec73d75-c398-452c-bac9-7d4412adbcb4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.537014] env[68285]: DEBUG nova.compute.manager [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 943.540212] env[68285]: DEBUG nova.compute.provider_tree [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.544705] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 943.544705] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b132b7-df59-2223-f3bd-d993ecf9866b" [ 943.544705] env[68285]: _type = "Task" [ 943.544705] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.557021] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b132b7-df59-2223-f3bd-d993ecf9866b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.688377] env[68285]: DEBUG nova.network.neutron [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 943.827531] env[68285]: INFO nova.compute.manager [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Took 0.57 seconds to detach 1 volumes for instance. [ 943.881852] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Volume attach. 
Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 943.882114] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580917', 'volume_id': 'f568d87f-424d-4432-b017-6f7542b87545', 'name': 'volume-f568d87f-424d-4432-b017-6f7542b87545', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5e101d74-7a82-4118-8f4c-7af9a6b0917a', 'attached_at': '', 'detached_at': '', 'volume_id': 'f568d87f-424d-4432-b017-6f7542b87545', 'serial': 'f568d87f-424d-4432-b017-6f7542b87545'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 943.883469] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec00d26-4bb7-42ba-b7bf-cd06ced699ec {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.902979] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec6fbf1-9c02-4f07-a9ac-10f3c952c4d7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.927900] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] volume-f568d87f-424d-4432-b017-6f7542b87545/volume-f568d87f-424d-4432-b017-6f7542b87545.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 943.930948] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-577de7b8-b554-4d20-acfc-a1f83892a8a6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.949493] env[68285]: DEBUG oslo_vmware.api [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 943.949493] env[68285]: value = "task-2891555" [ 943.949493] env[68285]: _type = "Task" [ 943.949493] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.959201] env[68285]: DEBUG oslo_vmware.api [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891555, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.012063] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467c4827-aa6e-42bb-9984-768efa0fa2ac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.035999] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64006d1-486f-4e99-bcb1-2c94c809ecad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.046800] env[68285]: DEBUG nova.scheduler.client.report [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 944.073732] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b132b7-df59-2223-f3bd-d993ecf9866b, 'name': SearchDatastore_Task, 'duration_secs': 0.016157} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.074352] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.074444] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b/ec89a2a4-3bfc-45c5-b7f2-239b52995d6b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 944.075025] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a0050be-f329-4103-8285-ea5fab0b915f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.083145] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 944.083145] env[68285]: value = "task-2891556" [ 944.083145] env[68285]: _type = "Task" [ 944.083145] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.086804] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.096956] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891556, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.196929] env[68285]: DEBUG nova.network.neutron [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Updating instance_info_cache with network_info: [{"id": "5728aafe-c57f-4eb7-a866-ad586f491645", "address": "fa:16:3e:be:7e:35", "network": {"id": "9a6153d2-38b3-4fd9-9895-1c56a6bda479", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1712974578", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5728aafe-c5", "ovs_interfaceid": "5728aafe-c57f-4eb7-a866-ad586f491645", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "50c33bdf-4dea-4c86-9423-a50e9db0b741", "address": "fa:16:3e:b0:f7:3e", "network": {"id": "c797a233-8664-4ec5-b973-c903f01e7175", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1416743836", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.20", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50c33bdf-4d", "ovs_interfaceid": "50c33bdf-4dea-4c86-9423-a50e9db0b741", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.287095] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquiring lock "9e81990d-e63e-48a7-8941-f0298ca184b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.287659] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Lock "9e81990d-e63e-48a7-8941-f0298ca184b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.338922] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.461422] env[68285]: DEBUG oslo_vmware.api [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891555, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.559424] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.714s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.559424] env[68285]: DEBUG nova.compute.manager [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 944.560512] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.257s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.560925] env[68285]: DEBUG nova.objects.instance [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lazy-loading 'resources' on Instance uuid 3e656d8d-bd06-4886-9424-4ed76b98aae9 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.563726] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 944.564600] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b137264e-8075-44ad-acb6-41a933ded50b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.573696] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 944.573696] env[68285]: value = "task-2891557" [ 944.573696] env[68285]: _type = "Task" [ 944.573696] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.585099] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891557, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.593698] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891556, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50834} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.597026] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b/ec89a2a4-3bfc-45c5-b7f2-239b52995d6b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 944.597026] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 944.597026] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d577e6f1-323a-48e1-bd66-45265a58e235 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.601593] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 944.601593] env[68285]: value = "task-2891558" [ 944.601593] env[68285]: _type = "Task" [ 944.601593] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.617412] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891558, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.700130] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Releasing lock "refresh_cache-8b473550-4a40-48a5-9e1c-7c48df828e61" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.700527] env[68285]: DEBUG nova.compute.manager [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Instance network_info: |[{"id": "5728aafe-c57f-4eb7-a866-ad586f491645", "address": "fa:16:3e:be:7e:35", "network": {"id": "9a6153d2-38b3-4fd9-9895-1c56a6bda479", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1712974578", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5728aafe-c5", "ovs_interfaceid": "5728aafe-c57f-4eb7-a866-ad586f491645", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "50c33bdf-4dea-4c86-9423-a50e9db0b741", "address": "fa:16:3e:b0:f7:3e", "network": {"id": "c797a233-8664-4ec5-b973-c903f01e7175", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1416743836", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.20", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50c33bdf-4d", "ovs_interfaceid": "50c33bdf-4dea-4c86-9423-a50e9db0b741", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 944.701010] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:7e:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'a4954661-ff70-43dd-bc60-8cbca6b9cbfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5728aafe-c57f-4eb7-a866-ad586f491645', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:f7:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35ac9709-fd8b-4630-897a-68ed629d1b11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50c33bdf-4dea-4c86-9423-a50e9db0b741', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 944.712498] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 944.712498] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 944.712498] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5dd52ca3-5f52-43ff-8035-13415326af52 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.734654] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 944.734654] env[68285]: value = "task-2891559" [ 944.734654] env[68285]: _type = "Task" [ 944.734654] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.742952] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891559, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.962258] env[68285]: DEBUG oslo_vmware.api [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891555, 'name': ReconfigVM_Task, 'duration_secs': 0.519694} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.962581] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Reconfigured VM instance instance-00000017 to attach disk [datastore1] volume-f568d87f-424d-4432-b017-6f7542b87545/volume-f568d87f-424d-4432-b017-6f7542b87545.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 944.967448] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc7abe40-48e3-4042-b2e7-1b51da61b2b7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.982305] env[68285]: DEBUG oslo_vmware.api [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 944.982305] env[68285]: value = "task-2891560" [ 944.982305] env[68285]: _type = "Task" [ 944.982305] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.993934] env[68285]: DEBUG oslo_vmware.api [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891560, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.067953] env[68285]: DEBUG nova.compute.utils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 945.069468] env[68285]: DEBUG nova.compute.manager [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 945.069608] env[68285]: DEBUG nova.network.neutron [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 945.084154] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891557, 'name': CreateSnapshot_Task, 'duration_secs': 0.495095} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.084451] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 945.085291] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4913015-945a-4333-8e63-747b3a035d2c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.110448] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891558, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067855} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.111045] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 945.112449] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b0d55a-4341-48ed-a856-6ffc7d925089 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.142431] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b/ec89a2a4-3bfc-45c5-b7f2-239b52995d6b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 945.144071] env[68285]: DEBUG nova.policy [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb7f978e7fa64e88af5756fca97fce6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4471597d3345443aa28b97acd91847e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 945.149708] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8eb6ebc7-cb97-4123-9311-5f05feccbbee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.174163] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 945.174163] env[68285]: value = "task-2891561" [ 945.174163] env[68285]: _type = "Task" [ 945.174163] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.186579] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891561, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.244006] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891559, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.460176] env[68285]: DEBUG nova.network.neutron [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Successfully created port: 38ebf797-d9b9-4c8d-8159-fdf3be92518b {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 945.493733] env[68285]: DEBUG oslo_vmware.api [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891560, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.572905] env[68285]: DEBUG nova.compute.manager [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 945.609522] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 945.609645] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-57117235-3762-456a-b399-c0da4268279c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.622046] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 945.622046] env[68285]: value = "task-2891562" [ 945.622046] env[68285]: _type = "Task" [ 945.622046] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.631402] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891562, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.686462] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891561, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.710278] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-400e6ade-c7ba-4073-a064-fb8111291a46 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.718343] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a861db-9d3f-4bcc-9001-48f2c9486ffc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.763395] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8cdfec-ee55-4dec-bf34-fd54164adc3f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.773144] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891559, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.779282] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6053e896-db40-4448-8549-0d6eb86be02c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.794725] env[68285]: DEBUG nova.compute.provider_tree [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.994306] env[68285]: DEBUG oslo_vmware.api [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891560, 'name': ReconfigVM_Task, 'duration_secs': 0.620866} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.994694] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580917', 'volume_id': 'f568d87f-424d-4432-b017-6f7542b87545', 'name': 'volume-f568d87f-424d-4432-b017-6f7542b87545', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5e101d74-7a82-4118-8f4c-7af9a6b0917a', 'attached_at': '', 'detached_at': '', 'volume_id': 'f568d87f-424d-4432-b017-6f7542b87545', 'serial': 'f568d87f-424d-4432-b017-6f7542b87545'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 946.133415] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891562, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.184943] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891561, 'name': ReconfigVM_Task, 'duration_secs': 0.873992} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.185209] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Reconfigured VM instance instance-0000000b to attach disk [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b/ec89a2a4-3bfc-45c5-b7f2-239b52995d6b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 946.185830] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-171727fa-1e5c-4ae3-ae32-cbf0ea538da9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.192893] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 946.192893] env[68285]: value = "task-2891563" [ 946.192893] env[68285]: _type = "Task" [ 946.192893] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.202510] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891563, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.266640] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891559, 'name': CreateVM_Task, 'duration_secs': 1.288466} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.266766] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 946.267602] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.267824] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.268166] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 946.268458] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72f074ad-ef41-44eb-9a02-486c5ea366cb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.273125] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 946.273125] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525082f5-a862-60ce-5cd2-73f0f81dae47" [ 946.273125] env[68285]: _type = "Task" [ 946.273125] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.281052] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525082f5-a862-60ce-5cd2-73f0f81dae47, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.298535] env[68285]: DEBUG nova.scheduler.client.report [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 946.476551] env[68285]: DEBUG oslo_concurrency.lockutils [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.476822] env[68285]: DEBUG oslo_concurrency.lockutils [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.477048] env[68285]: DEBUG oslo_concurrency.lockutils [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.477235] env[68285]: DEBUG oslo_concurrency.lockutils [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.477544] env[68285]: DEBUG oslo_concurrency.lockutils [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.479752] env[68285]: INFO nova.compute.manager [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Terminating instance [ 946.584338] env[68285]: DEBUG nova.compute.manager [None 
req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 946.611296] env[68285]: DEBUG nova.virt.hardware [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 946.611513] env[68285]: DEBUG nova.virt.hardware [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 946.611672] env[68285]: DEBUG nova.virt.hardware [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 946.611855] env[68285]: DEBUG nova.virt.hardware [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 946.612000] env[68285]: DEBUG nova.virt.hardware [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 946.612246] env[68285]: DEBUG nova.virt.hardware [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 946.612460] env[68285]: DEBUG nova.virt.hardware [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 946.612620] env[68285]: DEBUG nova.virt.hardware [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 
tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 946.612784] env[68285]: DEBUG nova.virt.hardware [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 946.614351] env[68285]: DEBUG nova.virt.hardware [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 946.614562] env[68285]: DEBUG nova.virt.hardware [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 946.615905] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cec69c9-dafb-47e2-a873-0a5193be592b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.629618] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e544435-5567-4cc3-b9ff-5aca3eadd0f2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.639955] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891562, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.703060] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891563, 'name': Rename_Task, 'duration_secs': 0.178685} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.707020] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 946.707020] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5aca8735-8a3c-44c6-bdc3-8fded3ab7dfb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.709935] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 946.709935] env[68285]: value = "task-2891564" [ 946.709935] env[68285]: _type = "Task" [ 946.709935] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.718128] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891564, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.783625] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525082f5-a862-60ce-5cd2-73f0f81dae47, 'name': SearchDatastore_Task, 'duration_secs': 0.014611} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.783942] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.784197] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 946.784432] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.784576] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.784751] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 946.785029] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82be858b-7b8b-46df-b8e8-eb4a31d7ee4f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.794352] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 946.794539] 
env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 946.795284] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcaea6e0-118e-457e-9598-2299a9841def {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.801241] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 946.801241] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5234b783-82fb-b12b-a92b-3175a50fdbf2" [ 946.801241] env[68285]: _type = "Task" [ 946.801241] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.807550] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.247s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.810382] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.593s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.812561] env[68285]: INFO nova.compute.claims [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 946.821857] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5234b783-82fb-b12b-a92b-3175a50fdbf2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.836724] env[68285]: INFO nova.scheduler.client.report [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Deleted allocations for instance 3e656d8d-bd06-4886-9424-4ed76b98aae9 [ 946.984536] env[68285]: DEBUG nova.compute.manager [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 946.984536] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 946.985408] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc742770-b44b-4f03-ba14-da70c0bb27a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.996497] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 947.000023] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e02d48f-f3f8-482b-b512-0e5cce544e25 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.008151] env[68285]: DEBUG oslo_vmware.api [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 947.008151] env[68285]: value = "task-2891565" [ 947.008151] env[68285]: _type = "Task" [ 947.008151] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.024403] env[68285]: DEBUG oslo_vmware.api [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891565, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.042018] env[68285]: DEBUG nova.objects.instance [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lazy-loading 'flavor' on Instance uuid 5e101d74-7a82-4118-8f4c-7af9a6b0917a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 947.044048] env[68285]: DEBUG nova.compute.manager [req-59db4221-12c7-4e3b-bddf-6b397c7a0a03 req-ca76ecb4-badc-4bc8-af2c-62a9f4331422 service nova] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Received event network-vif-plugged-38ebf797-d9b9-4c8d-8159-fdf3be92518b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 947.044418] env[68285]: DEBUG oslo_concurrency.lockutils [req-59db4221-12c7-4e3b-bddf-6b397c7a0a03 req-ca76ecb4-badc-4bc8-af2c-62a9f4331422 service nova] Acquiring lock "c690490f-9278-4595-8286-d4fd970bbc39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.046785] env[68285]: DEBUG oslo_concurrency.lockutils [req-59db4221-12c7-4e3b-bddf-6b397c7a0a03 req-ca76ecb4-badc-4bc8-af2c-62a9f4331422 service nova] Lock "c690490f-9278-4595-8286-d4fd970bbc39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.046785] env[68285]: DEBUG oslo_concurrency.lockutils [req-59db4221-12c7-4e3b-bddf-6b397c7a0a03 req-ca76ecb4-badc-4bc8-af2c-62a9f4331422 service nova] Lock "c690490f-9278-4595-8286-d4fd970bbc39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.046785] env[68285]: DEBUG nova.compute.manager [req-59db4221-12c7-4e3b-bddf-6b397c7a0a03 req-ca76ecb4-badc-4bc8-af2c-62a9f4331422 service nova] [instance: c690490f-9278-4595-8286-d4fd970bbc39] No waiting events found dispatching network-vif-plugged-38ebf797-d9b9-4c8d-8159-fdf3be92518b {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 947.046785] env[68285]: WARNING nova.compute.manager [req-59db4221-12c7-4e3b-bddf-6b397c7a0a03 req-ca76ecb4-badc-4bc8-af2c-62a9f4331422 service nova] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Received unexpected event network-vif-plugged-38ebf797-d9b9-4c8d-8159-fdf3be92518b for instance with vm_state building and task_state spawning. [ 947.141141] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891562, 'name': CloneVM_Task, 'duration_secs': 1.346691} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.143064] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Created linked-clone VM from snapshot [ 947.143064] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1688c32e-1b89-4dd0-9ad6-24a1397b1f2d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.153835] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Uploading image 5d4138b6-a36b-4e65-84a4-3ee0131980e6 {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 947.198886] env[68285]: DEBUG oslo_vmware.rw_handles [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 947.198886] env[68285]: value = "vm-580921" [ 947.198886] env[68285]: _type = "VirtualMachine" [ 947.198886] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 947.199237] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2403932d-b639-42c9-bdd4-c37d01b6fdf4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.207107] env[68285]: DEBUG oslo_vmware.rw_handles [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lease: (returnval){ [ 947.207107] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52973ff8-6dfb-8e98-7540-0ba7f448cfd6" [ 947.207107] env[68285]: _type = "HttpNfcLease" [ 947.207107] env[68285]: } obtained for exporting VM: (result){ [ 947.207107] env[68285]: value = "vm-580921" [ 947.207107] env[68285]: _type = "VirtualMachine" [ 947.207107] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 947.207525] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the lease: (returnval){ [ 947.207525] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52973ff8-6dfb-8e98-7540-0ba7f448cfd6" [ 947.207525] env[68285]: _type = "HttpNfcLease" [ 947.207525] env[68285]: } to be ready. {{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 947.218844] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 947.218844] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52973ff8-6dfb-8e98-7540-0ba7f448cfd6" [ 947.218844] env[68285]: _type = "HttpNfcLease" [ 947.218844] env[68285]: } is ready. 
{{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 947.223181] env[68285]: DEBUG oslo_vmware.rw_handles [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 947.223181] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52973ff8-6dfb-8e98-7540-0ba7f448cfd6" [ 947.223181] env[68285]: _type = "HttpNfcLease" [ 947.223181] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 947.223905] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891564, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.224689] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-762dd681-42e3-49fc-a107-8fa8b70e0032 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.234435] env[68285]: DEBUG oslo_vmware.rw_handles [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527ae4a9-c5a5-8cc5-430a-2b0bfb6f5cde/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 947.234584] env[68285]: DEBUG oslo_vmware.rw_handles [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527ae4a9-c5a5-8cc5-430a-2b0bfb6f5cde/disk-0.vmdk for reading. {{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 947.237666] env[68285]: DEBUG nova.network.neutron [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Successfully updated port: 38ebf797-d9b9-4c8d-8159-fdf3be92518b {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 947.320633] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5234b783-82fb-b12b-a92b-3175a50fdbf2, 'name': SearchDatastore_Task, 'duration_secs': 0.020628} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.321870] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64e64f2c-5b0e-4055-afa8-10b43a409d3e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.327813] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 947.327813] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5240d0b1-89e2-41b6-99f8-f1789594f8cf" [ 947.327813] env[68285]: _type = "Task" [ 947.327813] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.337461] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5240d0b1-89e2-41b6-99f8-f1789594f8cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.345159] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e7741f15-d823-4907-92c8-5bcdcb6503e6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.352796] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc7c81d5-3828-46b9-ad90-845db2ec7f39 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "3e656d8d-bd06-4886-9424-4ed76b98aae9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.232s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.520545] env[68285]: DEBUG oslo_vmware.api [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891565, 'name': PowerOffVM_Task, 'duration_secs': 0.175412} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.520816] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 947.520983] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 947.522140] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd53a204-a1c6-4e82-9480-a5cc7f43a052 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.548841] env[68285]: DEBUG oslo_concurrency.lockutils [None req-78300ec4-0f6a-4eb4-809a-c2ef9bf4161b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 10.816s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.589511] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 947.589856] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 947.590498] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Deleting the datastore file [datastore1] c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 947.590843] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44819eec-f051-4e6c-97e2-0e63e7ce8149 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.598560] env[68285]: DEBUG oslo_vmware.api [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 947.598560] env[68285]: value = "task-2891568" [ 947.598560] env[68285]: _type = "Task" [ 947.598560] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.607356] env[68285]: DEBUG oslo_vmware.api [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891568, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.642686] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08200d87-1b27-4fff-868b-446ba05fe44e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.642988] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08200d87-1b27-4fff-868b-446ba05fe44e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.643207] env[68285]: DEBUG nova.compute.manager [None req-08200d87-1b27-4fff-868b-446ba05fe44e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 947.644263] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5dd32e7-d950-4f2c-ac75-11dadd4bfc51 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.651965] env[68285]: DEBUG nova.compute.manager [None req-08200d87-1b27-4fff-868b-446ba05fe44e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68285) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 947.653104] env[68285]: DEBUG nova.objects.instance [None req-08200d87-1b27-4fff-868b-446ba05fe44e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lazy-loading 'flavor' on Instance uuid 5e101d74-7a82-4118-8f4c-7af9a6b0917a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 947.721620] env[68285]: DEBUG oslo_vmware.api [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891564, 'name': PowerOnVM_Task, 'duration_secs': 0.626482} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.722813] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 947.722813] env[68285]: DEBUG nova.compute.manager [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 947.723381] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc732af-7e23-4a6e-92ef-5396a770a41e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.741637] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "refresh_cache-c690490f-9278-4595-8286-d4fd970bbc39" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.741812] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "refresh_cache-c690490f-9278-4595-8286-d4fd970bbc39" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.742008] env[68285]: DEBUG nova.network.neutron [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 947.840680] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5240d0b1-89e2-41b6-99f8-f1789594f8cf, 'name': SearchDatastore_Task, 'duration_secs': 0.010023} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.841343] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.841903] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8b473550-4a40-48a5-9e1c-7c48df828e61/8b473550-4a40-48a5-9e1c-7c48df828e61.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 947.842644] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bda7dc2-e376-4540-b91e-0acfe450340c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.853096] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 947.853096] env[68285]: value = "task-2891569" [ 947.853096] env[68285]: _type = "Task" [ 947.853096] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.863882] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891569, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.112451] env[68285]: DEBUG oslo_vmware.api [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891568, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.259422} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.112876] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 948.116018] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 948.116018] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 948.116018] env[68285]: INFO nova.compute.manager [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 948.116018] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 948.116018] env[68285]: DEBUG nova.compute.manager [-] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 948.116018] env[68285]: DEBUG nova.network.neutron [-] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 948.251960] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.336473] env[68285]: DEBUG nova.network.neutron [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 948.370898] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891569, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.464793] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b85562-e694-434f-a746-bb8846e4fb22 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.472260] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610a7cca-c069-4370-8b93-c2d25dc1e862 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.519973] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970f9b94-dc82-4917-8700-900598617557 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.527696] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2353c278-1076-46fc-a60a-b0550526921b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.541996] env[68285]: DEBUG nova.compute.provider_tree [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.585072] env[68285]: DEBUG nova.network.neutron [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Updating instance_info_cache with network_info: [{"id": "38ebf797-d9b9-4c8d-8159-fdf3be92518b", "address": "fa:16:3e:0c:90:39", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ebf797-d9", "ovs_interfaceid": "38ebf797-d9b9-4c8d-8159-fdf3be92518b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.670555] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-08200d87-1b27-4fff-868b-446ba05fe44e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 948.670855] env[68285]: 
DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83fbe3a2-2eac-4fd2-b16b-876560e31541 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.678232] env[68285]: DEBUG oslo_vmware.api [None req-08200d87-1b27-4fff-868b-446ba05fe44e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 948.678232] env[68285]: value = "task-2891570" [ 948.678232] env[68285]: _type = "Task" [ 948.678232] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.688296] env[68285]: DEBUG oslo_vmware.api [None req-08200d87-1b27-4fff-868b-446ba05fe44e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891570, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.873301] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891569, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.632419} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.873301] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8b473550-4a40-48a5-9e1c-7c48df828e61/8b473550-4a40-48a5-9e1c-7c48df828e61.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 948.873301] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 948.874159] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-379adc90-89c9-4853-8c1b-26e4503375ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.883267] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 948.883267] env[68285]: value = "task-2891571" [ 948.883267] env[68285]: _type = "Task" [ 948.883267] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.891764] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891571, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.047714] env[68285]: DEBUG nova.scheduler.client.report [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 949.087440] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "refresh_cache-c690490f-9278-4595-8286-d4fd970bbc39" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.087755] env[68285]: DEBUG nova.compute.manager [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Instance network_info: |[{"id": "38ebf797-d9b9-4c8d-8159-fdf3be92518b", "address": "fa:16:3e:0c:90:39", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ebf797-d9", "ovs_interfaceid": "38ebf797-d9b9-4c8d-8159-fdf3be92518b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 949.088195] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:90:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38ebf797-d9b9-4c8d-8159-fdf3be92518b', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 949.096337] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 949.096845] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 949.097231] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0cd7bb7-819a-4dbf-bff6-f59b94e68e18 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.130858] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.130858] env[68285]: value = "task-2891572" [ 949.130858] env[68285]: _type = "Task" [ 949.130858] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.139069] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891572, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.149142] env[68285]: DEBUG nova.network.neutron [-] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.192033] env[68285]: DEBUG oslo_vmware.api [None req-08200d87-1b27-4fff-868b-446ba05fe44e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891570, 'name': PowerOffVM_Task, 'duration_secs': 0.377407} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.192374] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-08200d87-1b27-4fff-868b-446ba05fe44e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 949.192643] env[68285]: DEBUG nova.compute.manager [None req-08200d87-1b27-4fff-868b-446ba05fe44e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 949.193784] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e133f9-7ac3-4ce1-a2aa-aaa141ae8ba0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.254097] env[68285]: DEBUG nova.compute.manager [req-8b8ae1f4-1965-483c-8c55-af43249e6fc7 req-6cdbfa61-84d6-498c-8116-c5ee88d3ecf7 service nova] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Received event network-changed-38ebf797-d9b9-4c8d-8159-fdf3be92518b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 949.254480] env[68285]: DEBUG nova.compute.manager [req-8b8ae1f4-1965-483c-8c55-af43249e6fc7 req-6cdbfa61-84d6-498c-8116-c5ee88d3ecf7 service nova] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Refreshing instance network info cache due to event network-changed-38ebf797-d9b9-4c8d-8159-fdf3be92518b. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 949.254803] env[68285]: DEBUG oslo_concurrency.lockutils [req-8b8ae1f4-1965-483c-8c55-af43249e6fc7 req-6cdbfa61-84d6-498c-8116-c5ee88d3ecf7 service nova] Acquiring lock "refresh_cache-c690490f-9278-4595-8286-d4fd970bbc39" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.255352] env[68285]: DEBUG oslo_concurrency.lockutils [req-8b8ae1f4-1965-483c-8c55-af43249e6fc7 req-6cdbfa61-84d6-498c-8116-c5ee88d3ecf7 service nova] Acquired lock "refresh_cache-c690490f-9278-4595-8286-d4fd970bbc39" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.255566] env[68285]: DEBUG nova.network.neutron [req-8b8ae1f4-1965-483c-8c55-af43249e6fc7 req-6cdbfa61-84d6-498c-8116-c5ee88d3ecf7 service nova] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Refreshing network info cache for port 38ebf797-d9b9-4c8d-8159-fdf3be92518b {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 949.394397] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891571, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07555} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.394690] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 949.395506] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05771460-a6f5-4583-ae71-ec4839b903b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.420529] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 8b473550-4a40-48a5-9e1c-7c48df828e61/8b473550-4a40-48a5-9e1c-7c48df828e61.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 949.420854] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7cc27259-85eb-4966-b919-dcf00230ff24 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.442398] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 949.442398] env[68285]: value = "task-2891573" [ 949.442398] env[68285]: _type = "Task" [ 949.442398] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.453060] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891573, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.555221] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.745s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.555952] env[68285]: DEBUG nova.compute.manager [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 949.559621] env[68285]: DEBUG oslo_concurrency.lockutils [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 32.814s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.641344] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891572, 'name': CreateVM_Task, 'duration_secs': 0.391505} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.641601] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 949.642435] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.642698] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.643236] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 949.643551] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4d220ee-5c93-4afc-993f-81fe833433a4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.648409] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 949.648409] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525c34ff-1f40-8ea1-8350-735e8162ec0f" [ 949.648409] env[68285]: _type = "Task" [ 949.648409] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.653690] env[68285]: INFO nova.compute.manager [-] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Took 1.54 seconds to deallocate network for instance. [ 949.664404] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525c34ff-1f40-8ea1-8350-735e8162ec0f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.708476] env[68285]: DEBUG oslo_concurrency.lockutils [None req-08200d87-1b27-4fff-868b-446ba05fe44e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.065s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.955905] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891573, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.013185] env[68285]: DEBUG nova.network.neutron [req-8b8ae1f4-1965-483c-8c55-af43249e6fc7 req-6cdbfa61-84d6-498c-8116-c5ee88d3ecf7 service nova] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Updated VIF entry in instance network info cache for port 38ebf797-d9b9-4c8d-8159-fdf3be92518b. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 950.013559] env[68285]: DEBUG nova.network.neutron [req-8b8ae1f4-1965-483c-8c55-af43249e6fc7 req-6cdbfa61-84d6-498c-8116-c5ee88d3ecf7 service nova] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Updating instance_info_cache with network_info: [{"id": "38ebf797-d9b9-4c8d-8159-fdf3be92518b", "address": "fa:16:3e:0c:90:39", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ebf797-d9", "ovs_interfaceid": "38ebf797-d9b9-4c8d-8159-fdf3be92518b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.065453] env[68285]: INFO nova.compute.claims [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.070262] env[68285]: DEBUG nova.compute.utils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 950.072319] env[68285]: DEBUG nova.compute.manager [None 
req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 950.073219] env[68285]: DEBUG nova.network.neutron [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 950.142600] env[68285]: DEBUG nova.policy [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49c41aed1d5444e1a2e73ab6ad55fec4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53c6c9c73f07454fbe69beeee428a15a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 950.162024] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525c34ff-1f40-8ea1-8350-735e8162ec0f, 'name': SearchDatastore_Task, 'duration_secs': 0.01145} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.162364] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.162612] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 950.162838] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.162984] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 950.163184] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 950.164058] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-647f744d-4b84-48af-ac3e-86fb9e97f48d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.166763] env[68285]: DEBUG oslo_concurrency.lockutils [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.173413] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 950.173589] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 950.174357] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9254aa6-4a42-4dbd-9ae6-e3adf4c25bff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.179441] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 950.179441] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e6afbf-c939-b367-2e0f-f8c6c31f9b18" [ 950.179441] env[68285]: _type = "Task" [ 950.179441] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.188321] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e6afbf-c939-b367-2e0f-f8c6c31f9b18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.423827] env[68285]: DEBUG nova.network.neutron [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Successfully created port: 407962a9-eb0f-4437-a1b8-4513d48c09a0 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 950.456929] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891573, 'name': ReconfigVM_Task, 'duration_secs': 0.546493} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.459162] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 8b473550-4a40-48a5-9e1c-7c48df828e61/8b473550-4a40-48a5-9e1c-7c48df828e61.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 950.459909] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ccac4e1a-1519-471e-b7ca-7ea9b1aff9a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.468208] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 950.468208] env[68285]: value = "task-2891574" [ 950.468208] env[68285]: _type = "Task" [ 950.468208] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.479968] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891574, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.519880] env[68285]: DEBUG oslo_concurrency.lockutils [req-8b8ae1f4-1965-483c-8c55-af43249e6fc7 req-6cdbfa61-84d6-498c-8116-c5ee88d3ecf7 service nova] Releasing lock "refresh_cache-c690490f-9278-4595-8286-d4fd970bbc39" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.520169] env[68285]: DEBUG nova.compute.manager [req-8b8ae1f4-1965-483c-8c55-af43249e6fc7 req-6cdbfa61-84d6-498c-8116-c5ee88d3ecf7 service nova] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Received event network-vif-deleted-046ad230-e38b-43a3-ab69-77b20f182d76 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 950.561481] env[68285]: DEBUG oslo_concurrency.lockutils [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.561746] env[68285]: DEBUG oslo_concurrency.lockutils [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.561963] env[68285]: DEBUG oslo_concurrency.lockutils [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "7df1a9b4-e363-4e35-a8d5-6b09b671e6a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.562307] env[68285]: DEBUG oslo_concurrency.lockutils [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "7df1a9b4-e363-4e35-a8d5-6b09b671e6a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.562463] env[68285]: DEBUG oslo_concurrency.lockutils [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "7df1a9b4-e363-4e35-a8d5-6b09b671e6a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.564718] env[68285]: INFO nova.compute.manager [None req-731583c8-10a2-4159-8890-851cc05caafd 
tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Terminating instance [ 950.574135] env[68285]: DEBUG nova.compute.utils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 950.575987] env[68285]: INFO nova.compute.resource_tracker [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating resource usage from migration f79dddc6-371b-407f-8616-9c12a70c50cf [ 950.632250] env[68285]: DEBUG nova.objects.instance [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lazy-loading 'flavor' on Instance uuid 5e101d74-7a82-4118-8f4c-7af9a6b0917a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.699255] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e6afbf-c939-b367-2e0f-f8c6c31f9b18, 'name': SearchDatastore_Task, 'duration_secs': 0.008652} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.703739] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f191e27-18b1-4b64-b3ff-aed88f3a40d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.709534] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 950.709534] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52609f5a-aa12-6907-3fd7-45fb00601484" [ 950.709534] env[68285]: _type = "Task" [ 950.709534] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.719705] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52609f5a-aa12-6907-3fd7-45fb00601484, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.983451] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891574, 'name': Rename_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.008752] env[68285]: DEBUG oslo_concurrency.lockutils [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "e3b01f87-6a4c-4127-9204-2bfa5ff28f38" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.009038] env[68285]: DEBUG oslo_concurrency.lockutils [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "e3b01f87-6a4c-4127-9204-2bfa5ff28f38" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.009333] env[68285]: DEBUG oslo_concurrency.lockutils [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "e3b01f87-6a4c-4127-9204-2bfa5ff28f38-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.009436] env[68285]: DEBUG oslo_concurrency.lockutils [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "e3b01f87-6a4c-4127-9204-2bfa5ff28f38-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.009582] env[68285]: DEBUG oslo_concurrency.lockutils [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "e3b01f87-6a4c-4127-9204-2bfa5ff28f38-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.014137] env[68285]: INFO nova.compute.manager [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Terminating instance [ 951.070802] env[68285]: DEBUG nova.compute.manager [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 951.071154] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 951.072627] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6282e02-5591-44e2-a03e-eb1b22cfa8bd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.081305] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 951.082124] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-101ac12e-7fb9-4b94-81a8-e81bb6802127 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.087465] env[68285]: DEBUG nova.compute.manager [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 951.098390] env[68285]: DEBUG oslo_vmware.api [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 951.098390] env[68285]: value = "task-2891575" [ 951.098390] env[68285]: _type = "Task" [ 951.098390] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.113918] env[68285]: DEBUG oslo_vmware.api [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891575, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.137958] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.138432] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquired lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.138604] env[68285]: DEBUG nova.network.neutron [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 951.138902] env[68285]: DEBUG nova.objects.instance [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lazy-loading 'info_cache' on Instance uuid 5e101d74-7a82-4118-8f4c-7af9a6b0917a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 951.144080] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984924c9-c672-4baf-989e-da580462b173 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.152052] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71819e2-5c50-4874-a5e4-cdb43d8f8f65 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.186303] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0994138a-9310-4613-8973-c8737999b99c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.194259] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1636a34d-e306-4832-be0b-6ae173b24707 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.210116] env[68285]: DEBUG nova.compute.provider_tree [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.221329] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52609f5a-aa12-6907-3fd7-45fb00601484, 'name': SearchDatastore_Task, 'duration_secs': 0.023716} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.221329] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.221477] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] c690490f-9278-4595-8286-d4fd970bbc39/c690490f-9278-4595-8286-d4fd970bbc39.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 951.221755] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03c15884-0711-4c7b-9087-b5643a2c7882 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.229432] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 951.229432] env[68285]: value = "task-2891576" [ 951.229432] env[68285]: _type = "Task" [ 951.229432] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.238480] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891576, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.481502] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891574, 'name': Rename_Task, 'duration_secs': 0.658294} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.481826] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 951.482146] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81fed8a8-c8a0-4903-abef-d6bcc8923c90 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.488725] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 951.488725] env[68285]: value = "task-2891577" [ 951.488725] env[68285]: _type = "Task" [ 951.488725] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.498298] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891577, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.520862] env[68285]: DEBUG nova.compute.manager [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 951.521124] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 951.522017] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cafc96d-e0f3-41de-8601-0fac15d2380d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.533700] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 951.533981] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fcf700dd-9667-471b-be8e-201586648f83 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.541914] env[68285]: DEBUG oslo_vmware.api [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 951.541914] env[68285]: value = "task-2891578" [ 951.541914] env[68285]: _type = "Task" [ 951.541914] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.552489] env[68285]: DEBUG oslo_vmware.api [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891578, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.608519] env[68285]: DEBUG oslo_vmware.api [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891575, 'name': PowerOffVM_Task, 'duration_secs': 0.454335} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.608895] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 951.609090] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 951.609609] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52521104-db8f-4a52-90ee-65a2be5d52b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.648171] env[68285]: DEBUG nova.objects.base [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Object Instance<5e101d74-7a82-4118-8f4c-7af9a6b0917a> lazy-loaded attributes: flavor,info_cache {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 951.680530] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 951.680916] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 951.681257] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleting the datastore file [datastore1] 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 951.681578] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79bc1650-5eaa-4e8d-92ee-22225329a5bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.689287] env[68285]: DEBUG oslo_vmware.api [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 951.689287] env[68285]: value = "task-2891580" [ 951.689287] env[68285]: _type = "Task" [ 951.689287] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.708348] env[68285]: DEBUG oslo_vmware.api [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891580, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.716052] env[68285]: DEBUG nova.scheduler.client.report [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 951.747190] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891576, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.002328] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891577, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.014332] env[68285]: DEBUG nova.network.neutron [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Successfully updated port: 407962a9-eb0f-4437-a1b8-4513d48c09a0 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 952.050082] env[68285]: DEBUG nova.compute.manager [req-51356049-e01c-4ef8-b3e9-0edc468fcb90 req-d1929a8d-91d3-45d0-8e29-a3884dbed417 service nova] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Received event network-vif-plugged-407962a9-eb0f-4437-a1b8-4513d48c09a0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 952.050360] env[68285]: DEBUG oslo_concurrency.lockutils [req-51356049-e01c-4ef8-b3e9-0edc468fcb90 req-d1929a8d-91d3-45d0-8e29-a3884dbed417 service nova] Acquiring lock "f13ad5e7-341f-4475-b334-2144b0923e3b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.050590] env[68285]: DEBUG oslo_concurrency.lockutils [req-51356049-e01c-4ef8-b3e9-0edc468fcb90 req-d1929a8d-91d3-45d0-8e29-a3884dbed417 service nova] Lock "f13ad5e7-341f-4475-b334-2144b0923e3b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.050767] env[68285]: DEBUG oslo_concurrency.lockutils [req-51356049-e01c-4ef8-b3e9-0edc468fcb90 req-d1929a8d-91d3-45d0-8e29-a3884dbed417 service nova] Lock "f13ad5e7-341f-4475-b334-2144b0923e3b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.051035] env[68285]: DEBUG nova.compute.manager [req-51356049-e01c-4ef8-b3e9-0edc468fcb90 req-d1929a8d-91d3-45d0-8e29-a3884dbed417 service nova] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] No waiting events found dispatching network-vif-plugged-407962a9-eb0f-4437-a1b8-4513d48c09a0 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 952.051355] env[68285]: WARNING nova.compute.manager [req-51356049-e01c-4ef8-b3e9-0edc468fcb90 req-d1929a8d-91d3-45d0-8e29-a3884dbed417 service nova] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Received unexpected event network-vif-plugged-407962a9-eb0f-4437-a1b8-4513d48c09a0 for instance with vm_state building and task_state spawning. [ 952.060825] env[68285]: DEBUG oslo_vmware.api [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891578, 'name': PowerOffVM_Task, 'duration_secs': 0.210572} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.061272] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 952.061483] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 952.061813] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b494a67e-cdf3-4172-a75e-8e130fed8987 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.102267] env[68285]: DEBUG nova.compute.manager [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 952.134483] env[68285]: DEBUG nova.virt.hardware [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:50:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1283682463',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-182817119',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 952.134791] env[68285]: DEBUG nova.virt.hardware [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 952.134950] env[68285]: DEBUG nova.virt.hardware [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 952.135145] env[68285]: DEBUG nova.virt.hardware [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 952.135294] env[68285]: DEBUG nova.virt.hardware [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 952.135466] env[68285]: DEBUG nova.virt.hardware [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 952.135681] env[68285]: DEBUG nova.virt.hardware [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 952.135873] env[68285]: DEBUG nova.virt.hardware [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 
tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 952.136061] env[68285]: DEBUG nova.virt.hardware [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 952.136233] env[68285]: DEBUG nova.virt.hardware [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 952.136406] env[68285]: DEBUG nova.virt.hardware [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 952.137495] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b5a99a-3fa2-4e67-a6f7-3efe6e9f15b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.150224] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b58e6d1-ff0e-49b4-83a9-bb92db60375a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.158048] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 952.158274] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 952.158452] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Deleting the datastore file [datastore2] e3b01f87-6a4c-4127-9204-2bfa5ff28f38 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 952.159117] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53d4b2b2-7811-402a-8d4d-22862e7da122 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.174231] env[68285]: DEBUG oslo_vmware.api [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for the task: (returnval){ [ 952.174231] env[68285]: value = "task-2891582" [ 952.174231] env[68285]: _type = "Task" [ 952.174231] env[68285]: } to 
complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.182463] env[68285]: DEBUG oslo_vmware.api [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891582, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.200803] env[68285]: DEBUG oslo_vmware.api [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891580, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.321916} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.201125] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 952.201370] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 952.201528] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 952.201798] env[68285]: INFO nova.compute.manager [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Took 1.13 seconds to destroy the instance on the hypervisor. [ 952.202205] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 952.202426] env[68285]: DEBUG nova.compute.manager [-] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 952.202522] env[68285]: DEBUG nova.network.neutron [-] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 952.224378] env[68285]: DEBUG oslo_concurrency.lockutils [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.665s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.224618] env[68285]: INFO nova.compute.manager [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Migrating [ 952.234051] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.064s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.235646] env[68285]: INFO nova.compute.claims [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 952.258323] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891576, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.911066} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.258834] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] c690490f-9278-4595-8286-d4fd970bbc39/c690490f-9278-4595-8286-d4fd970bbc39.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 952.259079] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 952.259380] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-869f6481-098e-49d9-a572-a5b34921da2c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.267893] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 952.267893] env[68285]: value = "task-2891583" [ 952.267893] env[68285]: _type = "Task" [ 952.267893] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.280829] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891583, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.500971] env[68285]: DEBUG oslo_vmware.api [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891577, 'name': PowerOnVM_Task, 'duration_secs': 0.619401} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.501651] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 952.501930] env[68285]: INFO nova.compute.manager [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Took 14.71 seconds to spawn the instance on the hypervisor. 
[ 952.502195] env[68285]: DEBUG nova.compute.manager [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 952.503764] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d195f3-3afe-4e97-8123-92df3da8d4e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.516764] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "refresh_cache-f13ad5e7-341f-4475-b334-2144b0923e3b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.517217] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquired lock "refresh_cache-f13ad5e7-341f-4475-b334-2144b0923e3b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.521094] env[68285]: DEBUG nova.network.neutron [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 952.684822] env[68285]: DEBUG oslo_vmware.api [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Task: {'id': task-2891582, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.230158} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.685391] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 952.685767] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 952.686422] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 952.686781] env[68285]: INFO nova.compute.manager [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Took 1.17 seconds to destroy the instance on the hypervisor. [ 952.687199] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 952.688090] env[68285]: DEBUG nova.compute.manager [-] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 952.688562] env[68285]: DEBUG nova.network.neutron [-] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 952.691171] env[68285]: DEBUG nova.network.neutron [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Updating instance_info_cache with network_info: [{"id": "462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1", "address": "fa:16:3e:c7:6a:21", "network": {"id": "43282131-363f-42f6-b208-74cfe0d8a7c2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-166704782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fd7bc7649b647939584cc01c1f3b5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap462b7f0c-cb", "ovs_interfaceid": "462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.752392] env[68285]: DEBUG oslo_concurrency.lockutils [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.752577] env[68285]: DEBUG oslo_concurrency.lockutils [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.752750] env[68285]: DEBUG nova.network.neutron [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 952.781344] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891583, 'name': 
ExtendVirtualDisk_Task, 'duration_secs': 0.095138} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.781609] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 952.782854] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f901f5-356a-4b68-8942-183cebd7204a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.811860] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] c690490f-9278-4595-8286-d4fd970bbc39/c690490f-9278-4595-8286-d4fd970bbc39.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 952.814063] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a5dba7c-2402-43c3-bf80-048a2e7866f5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.842093] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 952.842093] env[68285]: value = "task-2891584" [ 952.842093] env[68285]: _type = "Task" [ 952.842093] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.852508] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891584, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.029223] env[68285]: INFO nova.compute.manager [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Took 50.41 seconds to build instance. [ 953.071043] env[68285]: DEBUG nova.network.neutron [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.194472] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Releasing lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.320603] env[68285]: DEBUG nova.network.neutron [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Updating instance_info_cache with network_info: [{"id": "407962a9-eb0f-4437-a1b8-4513d48c09a0", "address": "fa:16:3e:7a:30:a0", "network": {"id": "f7a43f6f-f31d-4b2c-a140-2b4d0375ff45", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1101832130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53c6c9c73f07454fbe69beeee428a15a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap407962a9-eb", "ovs_interfaceid": "407962a9-eb0f-4437-a1b8-4513d48c09a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.335010] env[68285]: DEBUG nova.network.neutron [-] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.342919] env[68285]: DEBUG nova.compute.manager [req-0565efa9-f3ac-4c8f-b9b4-49b03805d7d9 req-5cc995d1-c393-4e75-b594-33e1cd7f073b service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Received event network-vif-deleted-03219bf0-d5df-4a05-8632-cb282cf3fa2e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 953.343139] env[68285]: INFO nova.compute.manager [req-0565efa9-f3ac-4c8f-b9b4-49b03805d7d9 req-5cc995d1-c393-4e75-b594-33e1cd7f073b service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Neutron deleted interface 03219bf0-d5df-4a05-8632-cb282cf3fa2e; detaching it from the instance and deleting it from the info cache [ 953.343310] env[68285]: DEBUG nova.network.neutron [req-0565efa9-f3ac-4c8f-b9b4-49b03805d7d9 req-5cc995d1-c393-4e75-b594-33e1cd7f073b service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.358452] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: 
{'id': task-2891584, 'name': ReconfigVM_Task, 'duration_secs': 0.464127} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.363041] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Reconfigured VM instance instance-00000034 to attach disk [datastore2] c690490f-9278-4595-8286-d4fd970bbc39/c690490f-9278-4595-8286-d4fd970bbc39.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 953.366650] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a7c71241-0b1c-4af5-81f4-ce788005ff69 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.375567] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 953.375567] env[68285]: value = "task-2891585" [ 953.375567] env[68285]: _type = "Task" [ 953.375567] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.389456] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891585, 'name': Rename_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.531039] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a66842d8-1547-4427-8b5d-c03891306140 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "8b473550-4a40-48a5-9e1c-7c48df828e61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.152s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.617483] env[68285]: DEBUG nova.network.neutron [-] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.639397] env[68285]: DEBUG nova.network.neutron [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance_info_cache with network_info: [{"id": "567381f7-5f78-4920-beb9-db0ef3479244", "address": "fa:16:3e:68:fd:5e", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap567381f7-5f", "ovs_interfaceid": "567381f7-5f78-4920-beb9-db0ef3479244", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.806852] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "8b473550-4a40-48a5-9e1c-7c48df828e61" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.807133] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "8b473550-4a40-48a5-9e1c-7c48df828e61" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.807387] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "8b473550-4a40-48a5-9e1c-7c48df828e61-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.808040] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "8b473550-4a40-48a5-9e1c-7c48df828e61-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.808165] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "8b473550-4a40-48a5-9e1c-7c48df828e61-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.810560] env[68285]: INFO nova.compute.manager [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Terminating instance [ 953.823098] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Releasing lock "refresh_cache-f13ad5e7-341f-4475-b334-2144b0923e3b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.823433] env[68285]: DEBUG nova.compute.manager [None 
req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Instance network_info: |[{"id": "407962a9-eb0f-4437-a1b8-4513d48c09a0", "address": "fa:16:3e:7a:30:a0", "network": {"id": "f7a43f6f-f31d-4b2c-a140-2b4d0375ff45", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1101832130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53c6c9c73f07454fbe69beeee428a15a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap407962a9-eb", "ovs_interfaceid": "407962a9-eb0f-4437-a1b8-4513d48c09a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 953.824842] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:30:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4d3f69a-b086-4c3b-b976-5a848b63dfc4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '407962a9-eb0f-4437-a1b8-4513d48c09a0', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 953.832675] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 953.833343] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 953.833601] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-383b86cd-501c-4f45-a741-bb168cbfc261 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.852436] env[68285]: INFO nova.compute.manager [-] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Took 1.65 seconds to deallocate network for instance. 
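
The oslo_concurrency.lockutils records above (for example the instance lock "8b473550-4a40-48a5-9e1c-7c48df828e61" released after being held 53.152s, and the acquire/release pair on the "-events" lock inside do_terminate_instance) all come from the same named-lock pattern. The following is a minimal, illustrative sketch of that pattern with oslo.concurrency's synchronized decorator; the lock names are copied from the log only for illustration, and the helper functions and critical-section bodies are hypothetical stand-ins, not Nova's actual code.

from oslo_concurrency import lockutils

INSTANCE_UUID = "8b473550-4a40-48a5-9e1c-7c48df828e61"  # UUID taken from the log records above


def clear_events_for_instance(uuid):
    """Hypothetical stand-in for the per-instance event-list cleanup."""


def shutdown_and_destroy(uuid):
    """Hypothetical stand-in for the hypervisor teardown steps."""


def terminate_instance(instance_uuid):
    # The decorator's wrapper emits the 'Acquiring lock "..." by "..."',
    # 'acquired ... waited N.NNNs' and 'released ... held N.NNNs' DEBUG
    # records in exactly the format seen above.
    @lockutils.synchronized(instance_uuid)
    def do_terminate_instance():
        # Nested, short-lived lock, mirroring the "<uuid>-events" pair in the log.
        @lockutils.synchronized(instance_uuid + "-events")
        def _clear_events():
            clear_events_for_instance(instance_uuid)

        _clear_events()
        shutdown_and_destroy(instance_uuid)

    do_terminate_instance()


if __name__ == "__main__":
    terminate_instance(INSTANCE_UUID)
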
[ 953.854642] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fd74071-58e3-43f7-8b33-f015034c4f21 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.867406] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 953.867406] env[68285]: value = "task-2891586" [ 953.867406] env[68285]: _type = "Task" [ 953.867406] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.872321] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0762d5c3-4cbf-42ef-bade-82a93ed0c94e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.887812] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660eaf88-16a6-479f-88ef-6fc3c2e63147 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.906623] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891586, 'name': CreateVM_Task} progress is 15%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.926771] env[68285]: DEBUG nova.compute.manager [req-0565efa9-f3ac-4c8f-b9b4-49b03805d7d9 req-5cc995d1-c393-4e75-b594-33e1cd7f073b service nova] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Detach interface failed, port_id=03219bf0-d5df-4a05-8632-cb282cf3fa2e, reason: Instance e3b01f87-6a4c-4127-9204-2bfa5ff28f38 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 953.927549] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891585, 'name': Rename_Task, 'duration_secs': 0.170528} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.928554] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a258ba26-fdf1-4d16-bb9e-215ab9aebe0b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.934772] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 953.934772] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bdace5d8-1cf3-4cfb-b467-909aa08f8fef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.968955] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bbe0ff6-9bbe-42b3-a4a2-cfef730962e0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.971645] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 953.971645] env[68285]: value = "task-2891587" [ 953.971645] env[68285]: _type = "Task" [ 953.971645] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.980619] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2922d79a-373d-4333-9548-3b7ecec8038e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.989542] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891587, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.001174] env[68285]: DEBUG nova.compute.provider_tree [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.037395] env[68285]: DEBUG nova.compute.manager [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 954.086454] env[68285]: DEBUG nova.compute.manager [req-2537e801-e1cf-4846-94e9-84e4d82e0bf7 req-aab818c8-c6cc-4569-9965-99e76e0bb726 service nova] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Received event network-changed-407962a9-eb0f-4437-a1b8-4513d48c09a0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 954.086952] env[68285]: DEBUG nova.compute.manager [req-2537e801-e1cf-4846-94e9-84e4d82e0bf7 req-aab818c8-c6cc-4569-9965-99e76e0bb726 service nova] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Refreshing instance network info cache due to event network-changed-407962a9-eb0f-4437-a1b8-4513d48c09a0. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 954.089048] env[68285]: DEBUG oslo_concurrency.lockutils [req-2537e801-e1cf-4846-94e9-84e4d82e0bf7 req-aab818c8-c6cc-4569-9965-99e76e0bb726 service nova] Acquiring lock "refresh_cache-f13ad5e7-341f-4475-b334-2144b0923e3b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.089252] env[68285]: DEBUG oslo_concurrency.lockutils [req-2537e801-e1cf-4846-94e9-84e4d82e0bf7 req-aab818c8-c6cc-4569-9965-99e76e0bb726 service nova] Acquired lock "refresh_cache-f13ad5e7-341f-4475-b334-2144b0923e3b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.089431] env[68285]: DEBUG nova.network.neutron [req-2537e801-e1cf-4846-94e9-84e4d82e0bf7 req-aab818c8-c6cc-4569-9965-99e76e0bb726 service nova] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Refreshing network info cache for port 407962a9-eb0f-4437-a1b8-4513d48c09a0 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 954.121243] env[68285]: INFO nova.compute.manager [-] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Took 1.43 seconds to deallocate network for instance. [ 954.144994] env[68285]: DEBUG oslo_concurrency.lockutils [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Releasing lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.201184] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 954.201822] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04f75c39-79df-4ed4-9242-c412a8e49499 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.215542] env[68285]: DEBUG oslo_vmware.api [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 954.215542] env[68285]: value = "task-2891588" [ 954.215542] env[68285]: _type = "Task" [ 954.215542] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.229558] env[68285]: DEBUG oslo_vmware.api [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891588, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.316701] env[68285]: DEBUG nova.compute.manager [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 954.316990] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 954.318941] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d08c23-e25f-497c-9c83-cbba378b9867 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.327275] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 954.327275] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73d09ead-625c-4fd8-8ab7-a9db7fbe3384 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.336948] env[68285]: DEBUG oslo_vmware.api [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 954.336948] env[68285]: value = "task-2891589" [ 954.336948] env[68285]: _type = "Task" [ 954.336948] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.343386] env[68285]: DEBUG oslo_vmware.api [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891589, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.363179] env[68285]: DEBUG oslo_concurrency.lockutils [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.393658] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891586, 'name': CreateVM_Task, 'duration_secs': 0.417642} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.393863] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 954.394975] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.394975] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.395137] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 954.395655] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b92f67e-9951-4026-814f-625c60c6888e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.405608] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 954.405608] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52a321ed-106a-bc9e-c691-485cdaf673eb" [ 954.405608] env[68285]: _type = "Task" [ 954.405608] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.416096] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a321ed-106a-bc9e-c691-485cdaf673eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.481971] env[68285]: DEBUG oslo_vmware.api [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891587, 'name': PowerOnVM_Task, 'duration_secs': 0.523708} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.482257] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 954.482463] env[68285]: INFO nova.compute.manager [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Took 7.90 seconds to spawn the instance on the hypervisor. [ 954.482659] env[68285]: DEBUG nova.compute.manager [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 954.483563] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68cb391-22ba-4d1c-8d93-16b0bb3aa4aa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.505267] env[68285]: DEBUG nova.scheduler.client.report [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 954.568089] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.629292] env[68285]: DEBUG oslo_concurrency.lockutils [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.726975] env[68285]: DEBUG oslo_vmware.api [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891588, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.850014] env[68285]: DEBUG oslo_vmware.api [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891589, 'name': PowerOffVM_Task, 'duration_secs': 0.285982} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.850327] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 954.850510] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 954.850773] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19e84229-278e-4bc2-8bbd-43da2e7feaac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.918745] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a321ed-106a-bc9e-c691-485cdaf673eb, 'name': SearchDatastore_Task, 'duration_secs': 0.014758} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.918970] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.919205] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 954.919470] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.919617] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.919799] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 954.921067] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32b704e6-bf57-4d6e-9afa-5b23045c7895 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.929561] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 954.929761] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 954.930557] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6f7c177-9e7c-4cd1-89b7-e867323c5a81 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.942233] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 954.942233] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52942551-bf53-d777-b56d-976e6dbb887f" [ 954.942233] env[68285]: _type = "Task" [ 954.942233] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.949229] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52942551-bf53-d777-b56d-976e6dbb887f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.959222] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 954.959339] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 954.959587] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Deleting the datastore file [datastore1] 8b473550-4a40-48a5-9e1c-7c48df828e61 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 954.959777] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56ee18b9-61a9-459f-9aeb-dbbe0b0d5f42 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.967574] env[68285]: DEBUG oslo_vmware.api [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for the task: (returnval){ [ 954.967574] env[68285]: value = "task-2891591" [ 954.967574] env[68285]: _type = "Task" [ 954.967574] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.979329] env[68285]: DEBUG oslo_vmware.api [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891591, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.983697] env[68285]: DEBUG nova.network.neutron [req-2537e801-e1cf-4846-94e9-84e4d82e0bf7 req-aab818c8-c6cc-4569-9965-99e76e0bb726 service nova] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Updated VIF entry in instance network info cache for port 407962a9-eb0f-4437-a1b8-4513d48c09a0. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 954.983697] env[68285]: DEBUG nova.network.neutron [req-2537e801-e1cf-4846-94e9-84e4d82e0bf7 req-aab818c8-c6cc-4569-9965-99e76e0bb726 service nova] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Updating instance_info_cache with network_info: [{"id": "407962a9-eb0f-4437-a1b8-4513d48c09a0", "address": "fa:16:3e:7a:30:a0", "network": {"id": "f7a43f6f-f31d-4b2c-a140-2b4d0375ff45", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1101832130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53c6c9c73f07454fbe69beeee428a15a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap407962a9-eb", "ovs_interfaceid": "407962a9-eb0f-4437-a1b8-4513d48c09a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.005170] env[68285]: INFO nova.compute.manager [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Took 43.62 seconds to build instance. [ 955.012059] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.776s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.012059] env[68285]: DEBUG nova.compute.manager [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 955.014066] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.431s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.014373] env[68285]: DEBUG nova.objects.instance [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Lazy-loading 'resources' on Instance uuid 940e0328-970d-4f49-a102-d8a00b8c299b {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.228623] env[68285]: DEBUG oslo_vmware.api [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891588, 'name': PowerOnVM_Task, 'duration_secs': 0.534837} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.228976] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 955.229288] env[68285]: DEBUG nova.compute.manager [None req-7fcb4345-dc72-466c-8f80-471fc7dea2d6 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 955.230197] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fefd5b2-ae1d-44b0-9019-2c066b3cbff6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.452022] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52942551-bf53-d777-b56d-976e6dbb887f, 'name': SearchDatastore_Task, 'duration_secs': 0.014993} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.452678] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad4a7499-18f5-4bc9-9f10-4db7be7976dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.458243] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 955.458243] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c605fc-ace7-75de-39d3-cc3509b862d7" [ 955.458243] env[68285]: _type = "Task" [ 955.458243] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.466382] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c605fc-ace7-75de-39d3-cc3509b862d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.476596] env[68285]: DEBUG oslo_vmware.api [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Task: {'id': task-2891591, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.260243} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.476972] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 955.477274] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 955.477535] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 955.477754] env[68285]: INFO nova.compute.manager [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Took 1.16 seconds to destroy the instance on the hypervisor. [ 955.478020] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 955.478235] env[68285]: DEBUG nova.compute.manager [-] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 955.478329] env[68285]: DEBUG nova.network.neutron [-] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 955.486735] env[68285]: DEBUG oslo_concurrency.lockutils [req-2537e801-e1cf-4846-94e9-84e4d82e0bf7 req-aab818c8-c6cc-4569-9965-99e76e0bb726 service nova] Releasing lock "refresh_cache-f13ad5e7-341f-4475-b334-2144b0923e3b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.486976] env[68285]: DEBUG nova.compute.manager [req-2537e801-e1cf-4846-94e9-84e4d82e0bf7 req-aab818c8-c6cc-4569-9965-99e76e0bb726 service nova] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Received event network-vif-deleted-7e46bb86-86a9-4e35-8965-1477f6e7b8af {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 955.510246] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Acquiring lock "1dce61a2-0fe2-4384-835c-7e324446d7cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.510439] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Lock "1dce61a2-0fe2-4384-835c-7e324446d7cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.515306] env[68285]: DEBUG nova.compute.utils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 955.516580] env[68285]: DEBUG oslo_concurrency.lockutils [None req-de119f0c-7a5d-46bf-b5bb-52f2cf544275 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "c690490f-9278-4595-8286-d4fd970bbc39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.177s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.517037] env[68285]: DEBUG nova.compute.manager [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 955.517217] env[68285]: DEBUG nova.network.neutron [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 955.570375] env[68285]: DEBUG nova.policy [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6ef4303767040dc98414bfaa2932b85', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8da261a2538c41d983fee93cf80ed3c5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 955.665675] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67bb0837-782b-4de4-854c-f34de4569dad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.690167] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance 'b3b7f551-81aa-4ac4-9906-020fac5f01f7' progress to 0 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 955.886242] env[68285]: DEBUG nova.network.neutron [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Successfully created port: 45eef779-1d54-4b31-a125-d7c2d144d337 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 955.974188] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c605fc-ace7-75de-39d3-cc3509b862d7, 'name': SearchDatastore_Task, 'duration_secs': 0.013605} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.974818] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.975067] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] f13ad5e7-341f-4475-b334-2144b0923e3b/f13ad5e7-341f-4475-b334-2144b0923e3b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 955.975247] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b9206cd-942f-4b2c-83aa-731e0accbc20 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.984023] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 955.984023] env[68285]: value = "task-2891592" [ 955.984023] env[68285]: _type = "Task" [ 955.984023] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.998944] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891592, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.011476] env[68285]: DEBUG nova.compute.manager [req-1d9c3e96-e950-4e85-aa9b-b2f60bc9ef55 req-4a3ab9c7-b603-4244-ac75-85465084751b service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Received event network-vif-deleted-5728aafe-c57f-4eb7-a866-ad586f491645 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 956.011812] env[68285]: INFO nova.compute.manager [req-1d9c3e96-e950-4e85-aa9b-b2f60bc9ef55 req-4a3ab9c7-b603-4244-ac75-85465084751b service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Neutron deleted interface 5728aafe-c57f-4eb7-a866-ad586f491645; detaching it from the instance and deleting it from the info cache [ 956.011972] env[68285]: DEBUG nova.network.neutron [req-1d9c3e96-e950-4e85-aa9b-b2f60bc9ef55 req-4a3ab9c7-b603-4244-ac75-85465084751b service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Updating instance_info_cache with network_info: [{"id": "50c33bdf-4dea-4c86-9423-a50e9db0b741", "address": "fa:16:3e:b0:f7:3e", "network": {"id": "c797a233-8664-4ec5-b973-c903f01e7175", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1416743836", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.20", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ae4430b997b4480abbf2c5fce71cca04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50c33bdf-4d", "ovs_interfaceid": "50c33bdf-4dea-4c86-9423-a50e9db0b741", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.018677] env[68285]: DEBUG nova.compute.manager [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 956.025474] env[68285]: DEBUG nova.compute.manager [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 956.149068] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9496edc5-14f1-4051-922d-fd0ad913ed5d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "c690490f-9278-4595-8286-d4fd970bbc39" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.150201] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9496edc5-14f1-4051-922d-fd0ad913ed5d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "c690490f-9278-4595-8286-d4fd970bbc39" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.150201] env[68285]: DEBUG nova.compute.manager [None req-9496edc5-14f1-4051-922d-fd0ad913ed5d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 956.150876] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30fdb66-dd8f-4248-8d59-4038740e56fa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.161299] env[68285]: DEBUG nova.compute.manager [None req-9496edc5-14f1-4051-922d-fd0ad913ed5d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68285) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 956.161953] env[68285]: DEBUG nova.objects.instance [None req-9496edc5-14f1-4051-922d-fd0ad913ed5d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lazy-loading 'flavor' on Instance uuid c690490f-9278-4595-8286-d4fd970bbc39 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 956.170102] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600dfe2e-b867-410b-872c-40b1fe5617e2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.176463] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793d37df-a9d2-494d-a1be-8cb0ca1a424d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.210394] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 956.211632] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41f6b592-1b49-4881-942c-a824d04d8752 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.213916] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fe7655-13ee-4030-9045-cfeedff4b27b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.224067] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b928b139-cfaa-4ffc-867d-238efe450b81 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.228382] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 956.228382] env[68285]: value = "task-2891593" [ 956.228382] env[68285]: _type = "Task" [ 956.228382] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.241609] env[68285]: DEBUG nova.compute.provider_tree [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.248355] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891593, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.289298] env[68285]: DEBUG oslo_vmware.rw_handles [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527ae4a9-c5a5-8cc5-430a-2b0bfb6f5cde/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 956.291103] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47200c93-06e5-4fbe-b13e-2fb006fe0667 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.298822] env[68285]: DEBUG oslo_vmware.rw_handles [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527ae4a9-c5a5-8cc5-430a-2b0bfb6f5cde/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 956.298965] env[68285]: ERROR oslo_vmware.rw_handles [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527ae4a9-c5a5-8cc5-430a-2b0bfb6f5cde/disk-0.vmdk due to incomplete transfer. 
[ 956.299215] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-27ebb11e-6afe-439b-be19-4c7b10404a2c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.307779] env[68285]: DEBUG oslo_vmware.rw_handles [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527ae4a9-c5a5-8cc5-430a-2b0bfb6f5cde/disk-0.vmdk. {{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 956.308042] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Uploaded image 5d4138b6-a36b-4e65-84a4-3ee0131980e6 to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 956.311159] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 956.311271] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3748501f-419c-4546-baf2-a46d217a3296 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.321030] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 956.321030] env[68285]: value = "task-2891594" [ 956.321030] env[68285]: _type = "Task" [ 956.321030] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.329218] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891594, 'name': Destroy_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.342090] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.342184] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.500516] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891592, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.517648] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0bba0c00-868d-43f7-b9d7-3dd826cb59c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.536440] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753ca66c-03c5-4e72-8a7d-395133795e7f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.575898] env[68285]: DEBUG nova.compute.manager [req-1d9c3e96-e950-4e85-aa9b-b2f60bc9ef55 req-4a3ab9c7-b603-4244-ac75-85465084751b service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Detach interface failed, port_id=5728aafe-c57f-4eb7-a866-ad586f491645, reason: Instance 8b473550-4a40-48a5-9e1c-7c48df828e61 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 956.577747] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.739125] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891593, 'name': PowerOffVM_Task, 'duration_secs': 0.330414} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.739433] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 956.739636] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance 'b3b7f551-81aa-4ac4-9906-020fac5f01f7' progress to 17 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 956.744203] env[68285]: DEBUG nova.scheduler.client.report [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 956.829018] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891594, 'name': Destroy_Task} progress is 33%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.853955] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.853955] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.853955] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.853955] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.853955] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.853955] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.853955] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68285) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 956.853955] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.913438] env[68285]: DEBUG nova.network.neutron [-] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.003042] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891592, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537351} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.003042] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] f13ad5e7-341f-4475-b334-2144b0923e3b/f13ad5e7-341f-4475-b334-2144b0923e3b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 957.003042] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 957.003042] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9db89ed3-5a92-402e-84c7-29defdc58c90 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.008143] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 957.008143] env[68285]: value = "task-2891595" [ 957.008143] env[68285]: _type = "Task" [ 957.008143] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.019025] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891595, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.028796] env[68285]: DEBUG nova.compute.manager [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 957.055060] env[68285]: DEBUG nova.virt.hardware [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 957.055404] env[68285]: DEBUG nova.virt.hardware [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 957.055667] env[68285]: DEBUG nova.virt.hardware [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 957.055898] env[68285]: DEBUG nova.virt.hardware [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 957.056253] env[68285]: DEBUG nova.virt.hardware [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 957.056515] env[68285]: DEBUG nova.virt.hardware [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 957.056803] env[68285]: DEBUG nova.virt.hardware [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 957.057254] env[68285]: DEBUG nova.virt.hardware [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 957.057483] env[68285]: DEBUG nova.virt.hardware [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 957.057787] env[68285]: DEBUG nova.virt.hardware [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 957.058036] env[68285]: DEBUG nova.virt.hardware [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 957.059603] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa69ca01-e5af-4c94-9e97-48ff89059737 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.070921] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623fcd16-4c90-4c58-8c06-9f2f366f02f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.172602] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9496edc5-14f1-4051-922d-fd0ad913ed5d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 957.173038] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-301d6b7a-426b-429e-8aee-3020ddf3f2af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.180758] env[68285]: DEBUG oslo_vmware.api [None req-9496edc5-14f1-4051-922d-fd0ad913ed5d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 957.180758] env[68285]: value = "task-2891596" [ 957.180758] env[68285]: _type = "Task" [ 957.180758] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.188391] env[68285]: DEBUG oslo_vmware.api [None req-9496edc5-14f1-4051-922d-fd0ad913ed5d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891596, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.246078] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 957.246302] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 957.246949] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 957.246949] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 957.246949] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 957.247156] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 957.247284] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 957.247663] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 957.247663] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Got 1 possible topologies {{(pid=68285) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 957.247832] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 957.248370] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 957.253480] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.239s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.255588] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a62ab72-4fda-4a71-b969-db7efddcd5ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.266417] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.316s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.268030] env[68285]: INFO nova.compute.claims [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 957.278570] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 957.278570] env[68285]: value = "task-2891597" [ 957.278570] env[68285]: _type = "Task" [ 957.278570] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.290438] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891597, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.295934] env[68285]: INFO nova.scheduler.client.report [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Deleted allocations for instance 940e0328-970d-4f49-a102-d8a00b8c299b [ 957.331795] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891594, 'name': Destroy_Task, 'duration_secs': 0.561566} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.332101] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Destroyed the VM [ 957.332360] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 957.332629] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fd49948d-4576-4874-97ce-f8c8841ec545 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.338825] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 957.338825] env[68285]: value = "task-2891598" [ 957.338825] env[68285]: _type = "Task" [ 957.338825] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.347895] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891598, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.356244] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.411181] env[68285]: DEBUG nova.compute.manager [req-3884bc0d-f0d0-4e23-9427-c077dc3bde2e req-41c6f38f-7fc5-468b-8525-01c0f5daf753 service nova] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Received event network-vif-plugged-45eef779-1d54-4b31-a125-d7c2d144d337 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 957.411301] env[68285]: DEBUG oslo_concurrency.lockutils [req-3884bc0d-f0d0-4e23-9427-c077dc3bde2e req-41c6f38f-7fc5-468b-8525-01c0f5daf753 service nova] Acquiring lock "14285f6e-10a4-4077-a666-3c8d0cc1b87c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.411509] env[68285]: DEBUG oslo_concurrency.lockutils [req-3884bc0d-f0d0-4e23-9427-c077dc3bde2e req-41c6f38f-7fc5-468b-8525-01c0f5daf753 service nova] Lock "14285f6e-10a4-4077-a666-3c8d0cc1b87c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.411672] env[68285]: DEBUG oslo_concurrency.lockutils [req-3884bc0d-f0d0-4e23-9427-c077dc3bde2e req-41c6f38f-7fc5-468b-8525-01c0f5daf753 service nova] Lock "14285f6e-10a4-4077-a666-3c8d0cc1b87c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.411907] env[68285]: DEBUG nova.compute.manager [req-3884bc0d-f0d0-4e23-9427-c077dc3bde2e req-41c6f38f-7fc5-468b-8525-01c0f5daf753 service nova] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] No waiting events found dispatching network-vif-plugged-45eef779-1d54-4b31-a125-d7c2d144d337 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 957.412074] env[68285]: WARNING nova.compute.manager [req-3884bc0d-f0d0-4e23-9427-c077dc3bde2e req-41c6f38f-7fc5-468b-8525-01c0f5daf753 service nova] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Received unexpected event network-vif-plugged-45eef779-1d54-4b31-a125-d7c2d144d337 for instance with vm_state building and task_state spawning. [ 957.416649] env[68285]: INFO nova.compute.manager [-] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Took 1.94 seconds to deallocate network for instance. 
[ 957.492970] env[68285]: DEBUG nova.network.neutron [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Successfully updated port: 45eef779-1d54-4b31-a125-d7c2d144d337 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 957.518522] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891595, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070477} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.518798] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 957.519628] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc08dc86-8136-46cd-ac81-4c9f778e66f8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.541754] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] f13ad5e7-341f-4475-b334-2144b0923e3b/f13ad5e7-341f-4475-b334-2144b0923e3b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 957.542118] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37bacbf3-0738-4559-85dc-39e172c0b285 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.562705] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 957.562705] env[68285]: value = "task-2891599" [ 957.562705] env[68285]: _type = "Task" [ 957.562705] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.570832] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891599, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.690652] env[68285]: DEBUG oslo_vmware.api [None req-9496edc5-14f1-4051-922d-fd0ad913ed5d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891596, 'name': PowerOffVM_Task, 'duration_secs': 0.299063} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.690925] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9496edc5-14f1-4051-922d-fd0ad913ed5d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 957.691273] env[68285]: DEBUG nova.compute.manager [None req-9496edc5-14f1-4051-922d-fd0ad913ed5d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 957.692103] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e22c064-88a2-45e3-96af-ad90e67b0118 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.790029] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891597, 'name': ReconfigVM_Task, 'duration_secs': 0.274064} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.790029] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance 'b3b7f551-81aa-4ac4-9906-020fac5f01f7' progress to 33 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 957.808649] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c02b737-dad9-4a88-a386-0f9d83e1ca7d tempest-ServerMetadataTestJSON-231718286 tempest-ServerMetadataTestJSON-231718286-project-member] Lock "940e0328-970d-4f49-a102-d8a00b8c299b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.827s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.849730] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891598, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.924711] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.996823] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Acquiring lock "refresh_cache-14285f6e-10a4-4077-a666-3c8d0cc1b87c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.997779] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Acquired lock "refresh_cache-14285f6e-10a4-4077-a666-3c8d0cc1b87c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.997779] env[68285]: DEBUG nova.network.neutron [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 958.042133] env[68285]: DEBUG nova.compute.manager [req-5415144b-9eb3-4d7f-9b5a-b9219f90d44e req-acddb10e-d27d-47fa-a5ac-5a92ffdb08c4 service nova] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Received event network-vif-deleted-50c33bdf-4dea-4c86-9423-a50e9db0b741 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 958.075100] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891599, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.203754] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9496edc5-14f1-4051-922d-fd0ad913ed5d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "c690490f-9278-4595-8286-d4fd970bbc39" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.054s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.298475] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:52:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='5d08af6e-040e-4fac-974b-cfa5ed6c710a',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1109178364',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 958.298715] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.298898] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 958.299105] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.299259] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 958.299407] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 958.299899] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 958.300146] env[68285]: DEBUG nova.virt.hardware 
[None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 958.300384] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 958.300589] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 958.300803] env[68285]: DEBUG nova.virt.hardware [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 958.306746] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Reconfiguring VM instance instance-0000002b to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 958.310564] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1103e7ec-3e75-440f-a2b1-3ea939916c6c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.330109] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 958.330109] env[68285]: value = "task-2891600" [ 958.330109] env[68285]: _type = "Task" [ 958.330109] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.339880] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891600, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.351777] env[68285]: DEBUG oslo_vmware.api [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891598, 'name': RemoveSnapshot_Task, 'duration_secs': 0.972764} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.352178] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 958.352498] env[68285]: INFO nova.compute.manager [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Took 14.34 seconds to snapshot the instance on the hypervisor. [ 958.544443] env[68285]: DEBUG nova.network.neutron [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 958.577425] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891599, 'name': ReconfigVM_Task, 'duration_secs': 0.670508} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.580143] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Reconfigured VM instance instance-00000035 to attach disk [datastore2] f13ad5e7-341f-4475-b334-2144b0923e3b/f13ad5e7-341f-4475-b334-2144b0923e3b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 958.580556] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=68285) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 958.582060] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-c93453bb-4565-48e4-9b2f-b40349ad728d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.590262] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 958.590262] env[68285]: value = "task-2891601" [ 958.590262] env[68285]: _type = "Task" [ 958.590262] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.599351] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891601, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.824500] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76dccfae-99f2-4c75-a0b7-798b311651b5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.834989] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ddfc11-f049-4e2b-896a-bb2536b25c4d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.842651] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891600, 'name': ReconfigVM_Task, 'duration_secs': 0.204259} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.867792] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Reconfigured VM instance instance-0000002b to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 958.875347] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc10fbb-c261-47a4-b86d-6ffdb01b44dc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.878225] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce1c14d-33b3-4f7d-903c-35d052e8eed9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.897407] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb66aef-d742-461e-938d-3c53c462ea4d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.908657] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] b3b7f551-81aa-4ac4-9906-020fac5f01f7/b3b7f551-81aa-4ac4-9906-020fac5f01f7.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 958.910173] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59dc080d-c4c1-48da-bec5-f18e2311865b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.923698] env[68285]: INFO nova.compute.manager [None req-eafcd204-e77c-49da-a4bf-96c262634e4b 
tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Rebuilding instance [ 958.937952] env[68285]: DEBUG nova.compute.provider_tree [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.942781] env[68285]: DEBUG nova.compute.manager [None req-265a51e5-a8bb-4199-aad5-d689986ebe5b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Found 1 images (rotation: 2) {{(pid=68285) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 958.948255] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 958.948255] env[68285]: value = "task-2891602" [ 958.948255] env[68285]: _type = "Task" [ 958.948255] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.957778] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891602, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.987078] env[68285]: DEBUG nova.compute.manager [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 958.987935] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bad24b4-ef0a-40b1-87fc-5a5037583be7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.002254] env[68285]: DEBUG nova.network.neutron [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Updating instance_info_cache with network_info: [{"id": "45eef779-1d54-4b31-a125-d7c2d144d337", "address": "fa:16:3e:67:d0:40", "network": {"id": "f43186f2-844c-4873-ad76-9f7f2146d2fb", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1883567126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8da261a2538c41d983fee93cf80ed3c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd8b2b4e-f09d-4af6-9759-d372870e9b5f", "external-id": "nsx-vlan-transportzone-800", "segmentation_id": 800, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap45eef779-1d", "ovs_interfaceid": "45eef779-1d54-4b31-a125-d7c2d144d337", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.098765] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891601, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.241462} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.099132] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=68285) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 959.099944] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01768002-17ce-4e13-95b1-55410716366a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.131028] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] f13ad5e7-341f-4475-b334-2144b0923e3b/ephemeral_0.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.131028] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f3b4579-588b-48db-a81b-cf2cdd52dce9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.149144] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 959.149144] env[68285]: value = "task-2891603" [ 959.149144] env[68285]: _type = "Task" [ 959.149144] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.157248] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891603, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.439148] env[68285]: DEBUG nova.scheduler.client.report [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 959.459080] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891602, 'name': ReconfigVM_Task, 'duration_secs': 0.434946} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.459355] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Reconfigured VM instance instance-0000002b to attach disk [datastore1] b3b7f551-81aa-4ac4-9906-020fac5f01f7/b3b7f551-81aa-4ac4-9906-020fac5f01f7.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 959.459613] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance 'b3b7f551-81aa-4ac4-9906-020fac5f01f7' progress to 50 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 959.491274] env[68285]: DEBUG nova.compute.manager [req-ffa1a7c7-2ee6-4d10-b684-dbdbc77c99e6 req-e0fd54f8-0a2d-4877-84b0-39938b5b77f1 service nova] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Received event network-changed-45eef779-1d54-4b31-a125-d7c2d144d337 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 959.491500] env[68285]: DEBUG nova.compute.manager [req-ffa1a7c7-2ee6-4d10-b684-dbdbc77c99e6 req-e0fd54f8-0a2d-4877-84b0-39938b5b77f1 service nova] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Refreshing instance network info cache due to event network-changed-45eef779-1d54-4b31-a125-d7c2d144d337. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 959.491901] env[68285]: DEBUG oslo_concurrency.lockutils [req-ffa1a7c7-2ee6-4d10-b684-dbdbc77c99e6 req-e0fd54f8-0a2d-4877-84b0-39938b5b77f1 service nova] Acquiring lock "refresh_cache-14285f6e-10a4-4077-a666-3c8d0cc1b87c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.511695] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Releasing lock "refresh_cache-14285f6e-10a4-4077-a666-3c8d0cc1b87c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.511695] env[68285]: DEBUG nova.compute.manager [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Instance network_info: |[{"id": "45eef779-1d54-4b31-a125-d7c2d144d337", "address": "fa:16:3e:67:d0:40", "network": {"id": "f43186f2-844c-4873-ad76-9f7f2146d2fb", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1883567126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8da261a2538c41d983fee93cf80ed3c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd8b2b4e-f09d-4af6-9759-d372870e9b5f", "external-id": "nsx-vlan-transportzone-800", "segmentation_id": 800, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45eef779-1d", "ovs_interfaceid": "45eef779-1d54-4b31-a125-d7c2d144d337", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 959.511695] env[68285]: DEBUG oslo_concurrency.lockutils [req-ffa1a7c7-2ee6-4d10-b684-dbdbc77c99e6 req-e0fd54f8-0a2d-4877-84b0-39938b5b77f1 service nova] Acquired lock "refresh_cache-14285f6e-10a4-4077-a666-3c8d0cc1b87c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.511695] env[68285]: DEBUG nova.network.neutron [req-ffa1a7c7-2ee6-4d10-b684-dbdbc77c99e6 req-e0fd54f8-0a2d-4877-84b0-39938b5b77f1 service nova] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Refreshing network info cache for port 45eef779-1d54-4b31-a125-d7c2d144d337 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 959.511695] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:d0:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fd8b2b4e-f09d-4af6-9759-d372870e9b5f', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '45eef779-1d54-4b31-a125-d7c2d144d337', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 959.518232] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Creating folder: Project (8da261a2538c41d983fee93cf80ed3c5). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 959.519928] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0894777-e378-433a-b3ba-7f11e925a8a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.531818] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Created folder: Project (8da261a2538c41d983fee93cf80ed3c5) in parent group-v580775. [ 959.532192] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Creating folder: Instances. Parent ref: group-v580924. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 959.532274] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9eb0d5c0-36d6-4d20-bf7b-03ed284d4067 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.541982] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Created folder: Instances in parent group-v580924. [ 959.542246] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 959.542433] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 959.542635] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b07916a6-8d40-4aff-963f-c95b7c68646f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.563224] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 959.563224] env[68285]: value = "task-2891606" [ 959.563224] env[68285]: _type = "Task" [ 959.563224] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.572863] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891606, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.662025] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891603, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.945654] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.679s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.946305] env[68285]: DEBUG nova.compute.manager [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 959.949223] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.714s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.949561] env[68285]: DEBUG nova.objects.instance [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Lazy-loading 'resources' on Instance uuid a2a7590d-c415-4955-8a25-4b1411449557 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 959.966318] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6a1f93-1194-4f41-a95a-78004f270f72 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.987448] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c11d6b-84e0-4b67-be09-61de58ebbf96 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.005261] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 960.005466] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance 'b3b7f551-81aa-4ac4-9906-020fac5f01f7' progress to 67 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 960.009347] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb24b462-adbb-4e74-9acd-0c6e9a7ff0d3 
{{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.016187] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 960.016187] env[68285]: value = "task-2891607" [ 960.016187] env[68285]: _type = "Task" [ 960.016187] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.029133] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 960.029399] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 960.030410] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76394d88-552f-41be-aa97-2be97fa5cf9b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.037130] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 960.037247] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4eb16070-562b-4d1a-95c6-f72f0b6c26af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.072116] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891606, 'name': CreateVM_Task, 'duration_secs': 0.395636} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.072344] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 960.072953] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.073136] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.073451] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 960.073709] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f307f3ec-0212-488b-9675-a7cca4dd955f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.078712] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Waiting for the task: (returnval){ [ 960.078712] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5211d2b8-3746-1db7-6620-9ea88167794b" [ 960.078712] env[68285]: _type = "Task" [ 960.078712] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.087184] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5211d2b8-3746-1db7-6620-9ea88167794b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.100707] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 960.101094] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 960.101818] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleting the datastore file [datastore2] c690490f-9278-4595-8286-d4fd970bbc39 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 960.101818] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9eed585c-a0c0-45b3-9ace-478d6abe0421 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.108893] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 960.108893] env[68285]: value = "task-2891609" [ 960.108893] env[68285]: _type = "Task" [ 960.108893] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.120709] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891609, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.163924] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891603, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.183934] env[68285]: DEBUG nova.compute.manager [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 960.183934] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c96c110-c240-44c4-97b5-4eadca7679a6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.263249] env[68285]: DEBUG nova.network.neutron [req-ffa1a7c7-2ee6-4d10-b684-dbdbc77c99e6 req-e0fd54f8-0a2d-4877-84b0-39938b5b77f1 service nova] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Updated VIF entry in instance network info cache for port 45eef779-1d54-4b31-a125-d7c2d144d337. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 960.263592] env[68285]: DEBUG nova.network.neutron [req-ffa1a7c7-2ee6-4d10-b684-dbdbc77c99e6 req-e0fd54f8-0a2d-4877-84b0-39938b5b77f1 service nova] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Updating instance_info_cache with network_info: [{"id": "45eef779-1d54-4b31-a125-d7c2d144d337", "address": "fa:16:3e:67:d0:40", "network": {"id": "f43186f2-844c-4873-ad76-9f7f2146d2fb", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1883567126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8da261a2538c41d983fee93cf80ed3c5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd8b2b4e-f09d-4af6-9759-d372870e9b5f", "external-id": "nsx-vlan-transportzone-800", "segmentation_id": 800, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45eef779-1d", "ovs_interfaceid": "45eef779-1d54-4b31-a125-d7c2d144d337", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.452563] env[68285]: DEBUG nova.compute.utils [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 960.456754] env[68285]: DEBUG nova.compute.manager [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Not allocating networking since 'none' was specified. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 960.543336] env[68285]: DEBUG nova.network.neutron [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Port 567381f7-5f78-4920-beb9-db0ef3479244 binding to destination host cpu-1 is already ACTIVE {{(pid=68285) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 960.587851] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5211d2b8-3746-1db7-6620-9ea88167794b, 'name': SearchDatastore_Task, 'duration_secs': 0.010789} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.590249] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.590478] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 960.590851] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.590851] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.591022] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 960.591914] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4fc5756-7ec1-4a97-8854-adb7e450dccb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.599965] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 
tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 960.600188] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 960.602884] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-825e3496-f879-40d6-a88c-2acb30e97f84 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.608040] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Waiting for the task: (returnval){ [ 960.608040] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5239146d-e1f9-23e0-7743-010096a8a929" [ 960.608040] env[68285]: _type = "Task" [ 960.608040] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.619592] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5239146d-e1f9-23e0-7743-010096a8a929, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.623517] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891609, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192262} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.624060] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 960.624249] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 960.624422] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 960.663239] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891603, 'name': ReconfigVM_Task, 'duration_secs': 1.378852} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.663510] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Reconfigured VM instance instance-00000035 to attach disk [datastore2] f13ad5e7-341f-4475-b334-2144b0923e3b/ephemeral_0.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.664359] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2780120d-5a54-45f1-8151-ce9a88a4608b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.670338] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 960.670338] env[68285]: value = "task-2891610" [ 960.670338] env[68285]: _type = "Task" [ 960.670338] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.680245] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891610, 'name': Rename_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.695694] env[68285]: INFO nova.compute.manager [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] instance snapshotting [ 960.696326] env[68285]: DEBUG nova.objects.instance [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'flavor' on Instance uuid 34aeba05-804e-444c-8e58-69c7721b10b1 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 960.766626] env[68285]: DEBUG oslo_concurrency.lockutils [req-ffa1a7c7-2ee6-4d10-b684-dbdbc77c99e6 req-e0fd54f8-0a2d-4877-84b0-39938b5b77f1 service nova] Releasing lock "refresh_cache-14285f6e-10a4-4077-a666-3c8d0cc1b87c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.888167] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ed1853-2d2a-46a2-a284-6a85a7f4de22 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.895428] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45ab99a-f6ae-46c1-a8ca-14af73058bb4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.928255] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4864e161-d53c-4c1e-81cb-e62d2a664ef2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.935431] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7aec33-839b-41e2-a949-f365b1a11b48 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.949326] env[68285]: DEBUG nova.compute.provider_tree [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 960.958245] env[68285]: DEBUG nova.compute.manager [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 961.118887] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5239146d-e1f9-23e0-7743-010096a8a929, 'name': SearchDatastore_Task, 'duration_secs': 0.009732} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.119358] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1730c03f-ef86-4cb7-b1d5-b77c9abe8884 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.125207] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Waiting for the task: (returnval){ [ 961.125207] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5223dd25-cb20-728f-26e6-9020479e80db" [ 961.125207] env[68285]: _type = "Task" [ 961.125207] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.137082] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5223dd25-cb20-728f-26e6-9020479e80db, 'name': SearchDatastore_Task, 'duration_secs': 0.008872} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.137334] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.137580] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 14285f6e-10a4-4077-a666-3c8d0cc1b87c/14285f6e-10a4-4077-a666-3c8d0cc1b87c.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 961.137853] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc7023a0-3135-4852-bcaf-ce12ecb89812 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.143615] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Waiting for the task: (returnval){ [ 961.143615] env[68285]: value = "task-2891611" [ 961.143615] env[68285]: _type = "Task" [ 961.143615] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.152203] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891611, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.180595] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891610, 'name': Rename_Task, 'duration_secs': 0.474701} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.180875] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 961.181131] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d84236c-815a-4db2-85de-194a046070e6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.186605] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 961.186605] env[68285]: value = "task-2891612" [ 961.186605] env[68285]: _type = "Task" [ 961.186605] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.194597] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891612, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.202291] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5b06da-2cb1-4582-b8c7-90f557f18aa1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.221779] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c964e22d-3300-4ce5-9f73-68d07391017c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.456823] env[68285]: DEBUG nova.scheduler.client.report [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 961.577616] env[68285]: DEBUG oslo_concurrency.lockutils [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.577616] env[68285]: DEBUG oslo_concurrency.lockutils [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.577616] env[68285]: DEBUG oslo_concurrency.lockutils [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.653793] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891611, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490591} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.655816] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 14285f6e-10a4-4077-a666-3c8d0cc1b87c/14285f6e-10a4-4077-a666-3c8d0cc1b87c.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 961.656065] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 961.656488] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c5f8a056-f756-4f60-a9e4-8d9c7148ff60 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.662402] env[68285]: DEBUG nova.virt.hardware [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 961.662561] env[68285]: DEBUG nova.virt.hardware [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 961.662718] env[68285]: DEBUG nova.virt.hardware [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 961.662899] env[68285]: DEBUG nova.virt.hardware [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 961.663060] env[68285]: DEBUG nova.virt.hardware [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image pref 0:0:0 {{(pid=68285) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 961.663314] env[68285]: DEBUG nova.virt.hardware [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 961.663540] env[68285]: DEBUG nova.virt.hardware [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 961.663703] env[68285]: DEBUG nova.virt.hardware [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 961.663872] env[68285]: DEBUG nova.virt.hardware [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 961.664050] env[68285]: DEBUG nova.virt.hardware [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 961.664241] env[68285]: DEBUG nova.virt.hardware [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 961.665040] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0130df89-dd4a-42e8-874e-da6f7b81f747 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.669026] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Waiting for the task: (returnval){ [ 961.669026] env[68285]: value = "task-2891613" [ 961.669026] env[68285]: _type = "Task" [ 961.669026] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.675614] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ff364d-eca6-4f7c-bbfc-3525a62ace97 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.682512] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891613, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.692216] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:90:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38ebf797-d9b9-4c8d-8159-fdf3be92518b', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 961.699525] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 961.702511] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 961.702741] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97c462c7-ccb9-46b5-87a3-21e1c95cb34b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.722277] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891612, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.723556] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 961.723556] env[68285]: value = "task-2891614" [ 961.723556] env[68285]: _type = "Task" [ 961.723556] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.731122] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891614, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.732145] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 961.732397] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-52011363-622e-4ca2-af51-44344734e800 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.744458] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 961.744458] env[68285]: value = "task-2891615" [ 961.744458] env[68285]: _type = "Task" [ 961.744458] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.753432] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891615, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.963397] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.014s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.965689] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.588s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.967310] env[68285]: INFO nova.compute.claims [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 961.970898] env[68285]: DEBUG nova.compute.manager [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 961.993342] env[68285]: INFO nova.scheduler.client.report [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Deleted allocations for instance a2a7590d-c415-4955-8a25-4b1411449557 [ 961.997010] env[68285]: DEBUG nova.virt.hardware [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 961.997010] env[68285]: DEBUG nova.virt.hardware [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 961.997233] env[68285]: DEBUG nova.virt.hardware [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 961.997233] env[68285]: DEBUG nova.virt.hardware [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 961.997661] env[68285]: DEBUG nova.virt.hardware [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 961.997661] env[68285]: DEBUG nova.virt.hardware [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 961.997738] env[68285]: DEBUG nova.virt.hardware [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 961.999031] env[68285]: DEBUG nova.virt.hardware [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 
tempest-ServersAdmin275Test-205570985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 961.999031] env[68285]: DEBUG nova.virt.hardware [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 961.999031] env[68285]: DEBUG nova.virt.hardware [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 961.999031] env[68285]: DEBUG nova.virt.hardware [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 961.999338] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4593e063-0d65-427c-8923-e2dc0cf0ed33 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.011854] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e464de-678c-4087-9b48-d6fefa2ad1da {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.025832] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 962.032131] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Creating folder: Project (b550daefa1584080b3d761d3f3ae2956). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 962.032738] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b9824cb-5030-4075-89ea-a2c6683b4ad4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.044984] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Created folder: Project (b550daefa1584080b3d761d3f3ae2956) in parent group-v580775. [ 962.045200] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Creating folder: Instances. Parent ref: group-v580928. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 962.045462] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ed074c0-90f2-4471-80cb-c2bec1962215 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.062817] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Created folder: Instances in parent group-v580928. [ 962.063094] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 962.063303] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 962.063527] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc6f09a1-ee6e-4c19-b010-b68185dbcde9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.081876] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 962.081876] env[68285]: value = "task-2891618" [ 962.081876] env[68285]: _type = "Task" [ 962.081876] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.092431] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891618, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.181907] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891613, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075081} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.182252] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 962.183099] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8069bd-e0a7-4e0a-957a-fee9fa8562e0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.206856] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 14285f6e-10a4-4077-a666-3c8d0cc1b87c/14285f6e-10a4-4077-a666-3c8d0cc1b87c.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.210110] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73e9c05e-0574-456f-9540-af284e8a6f17 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.233092] env[68285]: DEBUG oslo_vmware.api [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2891612, 'name': PowerOnVM_Task, 'duration_secs': 0.604069} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.235227] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.235612] env[68285]: INFO nova.compute.manager [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Took 10.13 seconds to spawn the instance on the hypervisor. [ 962.235803] env[68285]: DEBUG nova.compute.manager [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 962.236298] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Waiting for the task: (returnval){ [ 962.236298] env[68285]: value = "task-2891619" [ 962.236298] env[68285]: _type = "Task" [ 962.236298] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.241533] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d158dbc5-bcc8-4a31-bc66-a7b86db01e32 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.245145] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891614, 'name': CreateVM_Task, 'duration_secs': 0.366212} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.249505] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 962.254855] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.255101] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.255517] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 962.256330] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6acecf29-39c6-479f-8c70-0d826ee936cc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.265745] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891619, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.275123] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891615, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.277649] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 962.277649] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52292c61-6293-c9fa-7c8c-74c838270b85" [ 962.277649] env[68285]: _type = "Task" [ 962.277649] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.286596] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52292c61-6293-c9fa-7c8c-74c838270b85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.511891] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0fdda0a5-d924-4a16-82c6-1aa14410dc23 tempest-ServersTestFqdnHostnames-392728445 tempest-ServersTestFqdnHostnames-392728445-project-member] Lock "a2a7590d-c415-4955-8a25-4b1411449557" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.998s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.595196] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891618, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.654261] env[68285]: DEBUG oslo_concurrency.lockutils [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.654470] env[68285]: DEBUG oslo_concurrency.lockutils [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.654644] env[68285]: DEBUG nova.network.neutron [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 962.762043] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891619, 'name': ReconfigVM_Task, 'duration_secs': 0.32654} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.762302] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891615, 'name': CreateSnapshot_Task, 'duration_secs': 0.623926} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.762546] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 14285f6e-10a4-4077-a666-3c8d0cc1b87c/14285f6e-10a4-4077-a666-3c8d0cc1b87c.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 962.763597] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 962.763597] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67990161-a265-47cf-bf82-3e922b7bb6d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.765461] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb38d936-978a-4744-984f-8c1092011e32 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.780522] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Waiting for the task: (returnval){ [ 962.780522] env[68285]: value = "task-2891620" [ 962.780522] env[68285]: _type = "Task" [ 962.780522] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.794387] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52292c61-6293-c9fa-7c8c-74c838270b85, 'name': SearchDatastore_Task, 'duration_secs': 0.012294} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.794840] env[68285]: INFO nova.compute.manager [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Took 47.59 seconds to build instance. 
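The entries above repeat one pattern: a vSphere task (CreateVM_Task, ExtendVirtualDisk_Task, CreateSnapshot_Task, ReconfigVM_Task, ...) is started, then polled through wait_for_task/_poll_task until it reports 100% and logs "completed successfully" with a duration_secs value. The snippet below is a minimal, self-contained sketch of that poll-until-complete loop; FakeTask and its poll() method are hypothetical stand-ins for illustration only, not oslo.vmware's or Nova's actual implementation.

```python
# Illustrative sketch only: a simulated poll-until-complete loop mirroring the
# wait_for_task/_poll_task pattern visible in the log entries above.
# FakeTask and poll() are hypothetical stand-ins, not oslo.vmware code.
import time


class FakeTask:
    """Hypothetical task whose progress advances a little on each poll."""

    def __init__(self, name):
        self.name = name
        self.progress = 0

    def poll(self):
        self.progress = min(self.progress + 25, 100)
        return self.progress


def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it reports 100%, printing progress like the log above."""
    start = time.monotonic()
    while True:
        progress = task.poll()
        print(f"Task: {{'name': {task.name!r}}} progress is {progress}%.")
        if progress >= 100:
            duration = time.monotonic() - start
            print(f"Task {task.name!r} completed successfully "
                  f"(duration_secs: {duration:.3f}).")
            return
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("CreateVM_Task"))
```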
[ 962.799210] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.799485] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 962.799956] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.800072] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.800240] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 962.800543] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891620, 'name': Rename_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.801030] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af54b174-0426-433e-82ab-e3ae870c61b3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.809984] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 962.810244] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 962.811799] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f73d482a-4bb8-420b-80d1-d255279b1889 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.818312] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 962.818312] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5263d75b-34ee-4596-76dd-5be488a68446" [ 962.818312] env[68285]: _type = "Task" [ 962.818312] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.826865] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5263d75b-34ee-4596-76dd-5be488a68446, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.099921] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891618, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.289580] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 963.292459] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-887e333a-6de1-4b33-a53b-2b522ce0a398 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.301697] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4296e44b-533f-4115-8131-92eda071cc04 tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "f13ad5e7-341f-4475-b334-2144b0923e3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.685s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.307034] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891620, 'name': Rename_Task, 'duration_secs': 0.15382} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.311062] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 963.311062] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 963.311062] env[68285]: value = "task-2891621" [ 963.311062] env[68285]: _type = "Task" [ 963.311062] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.311062] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3c362ae-4823-40ea-b97d-2c7da32d7076 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.329664] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Waiting for the task: (returnval){ [ 963.329664] env[68285]: value = "task-2891622" [ 963.329664] env[68285]: _type = "Task" [ 963.329664] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.329887] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891621, 'name': CloneVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.345609] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5263d75b-34ee-4596-76dd-5be488a68446, 'name': SearchDatastore_Task, 'duration_secs': 0.008377} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.346953] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-790a479b-4706-4866-a974-1986815060b2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.353428] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891622, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.356686] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 963.356686] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529649bc-9878-2dbc-de1e-14284a6b24ed" [ 963.356686] env[68285]: _type = "Task" [ 963.356686] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.370354] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529649bc-9878-2dbc-de1e-14284a6b24ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.434760] env[68285]: DEBUG nova.network.neutron [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance_info_cache with network_info: [{"id": "567381f7-5f78-4920-beb9-db0ef3479244", "address": "fa:16:3e:68:fd:5e", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap567381f7-5f", "ovs_interfaceid": "567381f7-5f78-4920-beb9-db0ef3479244", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.522091] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c279c1-24fb-4625-95f9-9ed41c2998f6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.530925] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981d2088-7920-4edc-8d9d-61f9de0bab5b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.561836] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24d5cf1-c92f-47c7-8120-6f226e5e2cca {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.569742] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e0cfd6f8-e107-44b0-aff8-e84f9fb30346 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.585187] env[68285]: DEBUG nova.compute.provider_tree [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.598779] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891618, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.808421] env[68285]: DEBUG nova.compute.manager [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 963.832114] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891621, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.844475] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891622, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.871584] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529649bc-9878-2dbc-de1e-14284a6b24ed, 'name': SearchDatastore_Task, 'duration_secs': 0.017038} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.871875] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.872594] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] c690490f-9278-4595-8286-d4fd970bbc39/c690490f-9278-4595-8286-d4fd970bbc39.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 963.872594] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eea741fb-9c8a-4467-98b9-6b7df233aad4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.881498] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 963.881498] env[68285]: value = "task-2891623" [ 963.881498] env[68285]: _type = "Task" [ 963.881498] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.891292] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891623, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.937656] env[68285]: DEBUG oslo_concurrency.lockutils [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Releasing lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.089763] env[68285]: DEBUG nova.scheduler.client.report [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 964.104087] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891618, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.206117] env[68285]: DEBUG nova.compute.manager [req-5dd2a69c-8ccf-4972-aa78-64f7a5c78dc9 req-670c9f8b-3bbc-41cc-8921-4238da67985a service nova] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Received event network-changed-407962a9-eb0f-4437-a1b8-4513d48c09a0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 964.206117] env[68285]: DEBUG nova.compute.manager [req-5dd2a69c-8ccf-4972-aa78-64f7a5c78dc9 req-670c9f8b-3bbc-41cc-8921-4238da67985a service nova] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Refreshing instance network info cache due to event network-changed-407962a9-eb0f-4437-a1b8-4513d48c09a0. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 964.206358] env[68285]: DEBUG oslo_concurrency.lockutils [req-5dd2a69c-8ccf-4972-aa78-64f7a5c78dc9 req-670c9f8b-3bbc-41cc-8921-4238da67985a service nova] Acquiring lock "refresh_cache-f13ad5e7-341f-4475-b334-2144b0923e3b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.206514] env[68285]: DEBUG oslo_concurrency.lockutils [req-5dd2a69c-8ccf-4972-aa78-64f7a5c78dc9 req-670c9f8b-3bbc-41cc-8921-4238da67985a service nova] Acquired lock "refresh_cache-f13ad5e7-341f-4475-b334-2144b0923e3b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.206727] env[68285]: DEBUG nova.network.neutron [req-5dd2a69c-8ccf-4972-aa78-64f7a5c78dc9 req-670c9f8b-3bbc-41cc-8921-4238da67985a service nova] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Refreshing network info cache for port 407962a9-eb0f-4437-a1b8-4513d48c09a0 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 964.324655] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891621, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.348226] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891622, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.355966] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.394972] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891623, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.477782] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9695833-2dce-47c6-87e3-c19b40bcf611 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.502363] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761634da-fc68-4a86-8951-103d1598e1fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.512423] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance 'b3b7f551-81aa-4ac4-9906-020fac5f01f7' progress to 83 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 964.598576] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.599160] env[68285]: DEBUG nova.compute.manager [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 964.602744] env[68285]: DEBUG oslo_concurrency.lockutils [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.147s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.603091] env[68285]: DEBUG nova.objects.instance [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lazy-loading 'resources' on Instance uuid 437a18da-8fe4-478e-82a0-3b1a9da47df8 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 964.617409] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891618, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.836491] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891621, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.846554] env[68285]: DEBUG oslo_vmware.api [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891622, 'name': PowerOnVM_Task, 'duration_secs': 1.035925} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.847024] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 964.847137] env[68285]: INFO nova.compute.manager [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Took 7.82 seconds to spawn the instance on the hypervisor. [ 964.847473] env[68285]: DEBUG nova.compute.manager [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 964.848765] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfc75a9-fdc0-4cb0-ab72-bed9eb00da30 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.894119] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891623, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624636} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.896393] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] c690490f-9278-4595-8286-d4fd970bbc39/c690490f-9278-4595-8286-d4fd970bbc39.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 964.896616] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 964.896893] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b1f514c-2d2f-4053-a6cd-0d713b7e926f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.907297] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 964.907297] env[68285]: value = "task-2891624" [ 964.907297] env[68285]: _type = "Task" [ 964.907297] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.925065] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891624, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.979626] env[68285]: DEBUG nova.network.neutron [req-5dd2a69c-8ccf-4972-aa78-64f7a5c78dc9 req-670c9f8b-3bbc-41cc-8921-4238da67985a service nova] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Updated VIF entry in instance network info cache for port 407962a9-eb0f-4437-a1b8-4513d48c09a0. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 964.979626] env[68285]: DEBUG nova.network.neutron [req-5dd2a69c-8ccf-4972-aa78-64f7a5c78dc9 req-670c9f8b-3bbc-41cc-8921-4238da67985a service nova] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Updating instance_info_cache with network_info: [{"id": "407962a9-eb0f-4437-a1b8-4513d48c09a0", "address": "fa:16:3e:7a:30:a0", "network": {"id": "f7a43f6f-f31d-4b2c-a140-2b4d0375ff45", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1101832130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53c6c9c73f07454fbe69beeee428a15a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap407962a9-eb", "ovs_interfaceid": "407962a9-eb0f-4437-a1b8-4513d48c09a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.019726] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 965.020037] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2023239-b133-4262-95af-4ff6b0541b77 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.029437] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 965.029437] env[68285]: value = "task-2891625" [ 965.029437] env[68285]: _type = "Task" [ 965.029437] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.045317] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891625, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.103954] env[68285]: DEBUG nova.compute.utils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 965.110747] env[68285]: DEBUG nova.compute.manager [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 965.111571] env[68285]: DEBUG nova.network.neutron [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 965.115746] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891618, 'name': CreateVM_Task, 'duration_secs': 2.850166} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.116554] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.118581] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.118581] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.118817] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 965.119100] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c4038e2-9dd1-42e1-bf4f-d3dfaa135f37 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.128029] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 965.128029] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e50d79-4ec8-8f6a-6e5a-ead74fd840de" [ 965.128029] env[68285]: _type = "Task" [ 965.128029] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.138349] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e50d79-4ec8-8f6a-6e5a-ead74fd840de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.170227] env[68285]: DEBUG nova.policy [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b1407761c80e4451a5725ac25ed91450', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '31396f8bc32b48e883ef6bd7c38ad3c0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 965.324973] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891621, 'name': CloneVM_Task, 'duration_secs': 1.698463} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.327998] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Created linked-clone VM from snapshot [ 965.329160] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbb6cad-ddf6-4ef9-a518-0c8aba3e521f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.337284] env[68285]: DEBUG nova.objects.instance [None req-8ac46ad7-cbbb-400a-86cb-3793c4f59f91 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Lazy-loading 'flavor' on Instance uuid 87582063-50f9-4518-ad2d-915c9cd49b19 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.342526] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Uploading image 81d960e4-b749-453c-ada9-72371e3f563e {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 965.371896] env[68285]: INFO nova.compute.manager [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Took 48.23 seconds to build instance. 
[ 965.379563] env[68285]: DEBUG oslo_vmware.rw_handles [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 965.379563] env[68285]: value = "vm-580932" [ 965.379563] env[68285]: _type = "VirtualMachine" [ 965.379563] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 965.379840] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4e19a81a-9296-41f4-bc89-e720b72e5ea4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.390428] env[68285]: DEBUG oslo_vmware.rw_handles [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lease: (returnval){ [ 965.390428] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523fd69e-90a1-58bf-88f4-065345d62f01" [ 965.390428] env[68285]: _type = "HttpNfcLease" [ 965.390428] env[68285]: } obtained for exporting VM: (result){ [ 965.390428] env[68285]: value = "vm-580932" [ 965.390428] env[68285]: _type = "VirtualMachine" [ 965.390428] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 965.390428] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the lease: (returnval){ [ 965.390428] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523fd69e-90a1-58bf-88f4-065345d62f01" [ 965.390428] env[68285]: _type = "HttpNfcLease" [ 965.390428] env[68285]: } to be ready. {{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 965.402433] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 965.402433] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523fd69e-90a1-58bf-88f4-065345d62f01" [ 965.402433] env[68285]: _type = "HttpNfcLease" [ 965.402433] env[68285]: } is initializing. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 965.422715] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891624, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.13834} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.422715] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 965.423618] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e04f5c-43d2-401b-aab1-c11f9975b02c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.450044] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] c690490f-9278-4595-8286-d4fd970bbc39/c690490f-9278-4595-8286-d4fd970bbc39.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 965.453400] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a60a377f-a9ae-4c21-8112-af70e2ac40a1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.473838] env[68285]: DEBUG nova.network.neutron [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Successfully created port: ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 965.483946] env[68285]: DEBUG oslo_concurrency.lockutils [req-5dd2a69c-8ccf-4972-aa78-64f7a5c78dc9 req-670c9f8b-3bbc-41cc-8921-4238da67985a service nova] Releasing lock "refresh_cache-f13ad5e7-341f-4475-b334-2144b0923e3b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.484794] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 965.484794] env[68285]: value = "task-2891627" [ 965.484794] env[68285]: _type = "Task" [ 965.484794] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.505334] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891627, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.540571] env[68285]: DEBUG oslo_vmware.api [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891625, 'name': PowerOnVM_Task, 'duration_secs': 0.481709} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.545750] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 965.545750] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-802100c4-dcb3-46b4-8e07-25ccbcb3f683 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance 'b3b7f551-81aa-4ac4-9906-020fac5f01f7' progress to 100 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 965.616775] env[68285]: DEBUG nova.compute.manager [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 965.642754] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e50d79-4ec8-8f6a-6e5a-ead74fd840de, 'name': SearchDatastore_Task, 'duration_secs': 0.042395} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.642754] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.642754] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 965.642933] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.642979] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.643162] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 
tempest-ServersAdmin275Test-205570985-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 965.643436] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5c6dd3d-6ee7-46a0-8564-be6f4de6f770 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.662512] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 965.662783] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 965.666466] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa0f5275-3d0b-467d-81a2-c27945eaab56 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.674170] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 965.674170] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523361e6-744e-1897-1e84-eceeedddfabd" [ 965.674170] env[68285]: _type = "Task" [ 965.674170] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.682307] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523361e6-744e-1897-1e84-eceeedddfabd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.721082] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3212d3-1388-456d-9a75-4296d8515715 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.732193] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef2f934-bebb-4079-9246-b77a1413ba32 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.767996] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a85be7-9850-4b70-9d6c-80a2aac549f6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.778246] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6539e82e-db3e-4974-9e45-24ec0bf19cfc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.793462] env[68285]: DEBUG nova.compute.provider_tree [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.851971] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ac46ad7-cbbb-400a-86cb-3793c4f59f91 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquiring lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.852196] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ac46ad7-cbbb-400a-86cb-3793c4f59f91 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquired lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.874115] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c959dc39-019b-4a60-a131-a5031f7f404d tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Lock "14285f6e-10a4-4077-a666-3c8d0cc1b87c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.135s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.898689] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 965.898689] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523fd69e-90a1-58bf-88f4-065345d62f01" [ 965.898689] env[68285]: _type = "HttpNfcLease" [ 965.898689] env[68285]: } is ready. 
{{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 965.898969] env[68285]: DEBUG oslo_vmware.rw_handles [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 965.898969] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523fd69e-90a1-58bf-88f4-065345d62f01" [ 965.898969] env[68285]: _type = "HttpNfcLease" [ 965.898969] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 965.899688] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268e870c-e900-4d97-8459-403855a77bae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.906902] env[68285]: DEBUG oslo_vmware.rw_handles [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525418ea-d913-826b-0560-da9ae488a209/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 965.907094] env[68285]: DEBUG oslo_vmware.rw_handles [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525418ea-d913-826b-0560-da9ae488a209/disk-0.vmdk for reading. {{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 965.994892] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891627, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.062860] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1bbea42f-9095-478b-947a-2733aa87c5f3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.184582] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523361e6-744e-1897-1e84-eceeedddfabd, 'name': SearchDatastore_Task, 'duration_secs': 0.058763} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.185428] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b61e7f3f-3789-42bc-8ec6-16822dd52999 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.190635] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 966.190635] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5259a083-5eba-1258-01e8-bda3047662b9" [ 966.190635] env[68285]: _type = "Task" [ 966.190635] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.205081] env[68285]: DEBUG oslo_concurrency.lockutils [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquiring lock "2a9b3b56-8607-4da8-9186-8a933cfe0351" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.205386] env[68285]: DEBUG oslo_concurrency.lockutils [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Lock "2a9b3b56-8607-4da8-9186-8a933cfe0351" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.206635] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5259a083-5eba-1258-01e8-bda3047662b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.296580] env[68285]: DEBUG nova.scheduler.client.report [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 966.496185] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891627, 'name': ReconfigVM_Task, 'duration_secs': 0.873335} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.496185] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Reconfigured VM instance instance-00000034 to attach disk [datastore1] c690490f-9278-4595-8286-d4fd970bbc39/c690490f-9278-4595-8286-d4fd970bbc39.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.496945] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6cd3b540-9fb0-4a44-a3e9-50e294e60986 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.503523] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 966.503523] env[68285]: value = "task-2891628" [ 966.503523] env[68285]: _type = "Task" [ 966.503523] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.513463] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891628, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.549376] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Acquiring lock "14285f6e-10a4-4077-a666-3c8d0cc1b87c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.549376] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Lock "14285f6e-10a4-4077-a666-3c8d0cc1b87c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.549376] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Acquiring lock "14285f6e-10a4-4077-a666-3c8d0cc1b87c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.549376] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Lock "14285f6e-10a4-4077-a666-3c8d0cc1b87c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.549376] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Lock "14285f6e-10a4-4077-a666-3c8d0cc1b87c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.552026] env[68285]: INFO nova.compute.manager [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Terminating instance [ 966.631780] env[68285]: DEBUG nova.compute.manager [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 966.661148] env[68285]: DEBUG nova.virt.hardware [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 966.661478] env[68285]: DEBUG nova.virt.hardware [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 966.661689] env[68285]: DEBUG nova.virt.hardware [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 966.661889] env[68285]: DEBUG nova.virt.hardware [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 966.662374] env[68285]: DEBUG nova.virt.hardware [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 966.662580] env[68285]: DEBUG nova.virt.hardware [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 966.662898] env[68285]: DEBUG nova.virt.hardware [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 966.663110] env[68285]: DEBUG nova.virt.hardware [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 966.663346] env[68285]: DEBUG nova.virt.hardware [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 966.663569] env[68285]: DEBUG nova.virt.hardware [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 966.663795] env[68285]: DEBUG nova.virt.hardware [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 966.664714] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f0a908-8a22-4c25-bbea-140159b713a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.673348] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efea225-909d-4d60-8c5c-548dc504f978 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.701551] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5259a083-5eba-1258-01e8-bda3047662b9, 'name': SearchDatastore_Task, 'duration_secs': 0.01079} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.701551] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.701551] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8a848ec8-1ae0-4437-be4f-49219214d11f/8a848ec8-1ae0-4437-be4f-49219214d11f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 966.701551] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2c23725-b528-4356-9884-3104db553491 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.709316] env[68285]: DEBUG nova.compute.manager [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 966.714030] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 966.714030] env[68285]: value = "task-2891629" [ 966.714030] env[68285]: _type = "Task" [ 966.714030] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.724872] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891629, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.745107] env[68285]: DEBUG nova.network.neutron [None req-8ac46ad7-cbbb-400a-86cb-3793c4f59f91 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 966.801992] env[68285]: DEBUG oslo_concurrency.lockutils [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.199s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.806060] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.093s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.806060] env[68285]: DEBUG nova.objects.instance [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 966.828274] env[68285]: INFO nova.scheduler.client.report [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Deleted allocations for instance 437a18da-8fe4-478e-82a0-3b1a9da47df8 [ 967.006194] env[68285]: DEBUG nova.compute.manager [req-2ae319c4-5688-4e3c-a5ee-fc094445fbfd req-7f934fb5-b53b-42c3-a597-eb79244b2e15 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Received event network-changed-4dda7e58-86f1-4d41-ad9e-0f08c3df3241 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 967.006194] env[68285]: DEBUG nova.compute.manager [req-2ae319c4-5688-4e3c-a5ee-fc094445fbfd req-7f934fb5-b53b-42c3-a597-eb79244b2e15 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Refreshing instance network info cache due to event network-changed-4dda7e58-86f1-4d41-ad9e-0f08c3df3241. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 967.006194] env[68285]: DEBUG oslo_concurrency.lockutils [req-2ae319c4-5688-4e3c-a5ee-fc094445fbfd req-7f934fb5-b53b-42c3-a597-eb79244b2e15 service nova] Acquiring lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.020612] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891628, 'name': Rename_Task, 'duration_secs': 0.22047} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.021130] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 967.021518] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f19b5eb-f9f9-44cc-9e29-6f35edca5c93 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.029464] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 967.029464] env[68285]: value = "task-2891630" [ 967.029464] env[68285]: _type = "Task" [ 967.029464] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.045777] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891630, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.058706] env[68285]: DEBUG nova.network.neutron [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Successfully updated port: ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 967.069998] env[68285]: DEBUG nova.compute.manager [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 967.069998] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 967.071254] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b977114-311a-4e72-b03f-02bf7acb7425 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.081749] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 967.082112] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11012d85-c0bd-41da-b402-09e27fd3b7ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.091235] env[68285]: DEBUG oslo_vmware.api [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Waiting for the task: (returnval){ [ 967.091235] env[68285]: value = "task-2891631" [ 967.091235] env[68285]: _type = "Task" [ 967.091235] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.103340] env[68285]: DEBUG oslo_vmware.api [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891631, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.109484] env[68285]: DEBUG nova.compute.manager [req-2ff601b7-f491-490b-8540-3e303a2cec29 req-1d605e29-b20d-486f-b864-85be3e632e01 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Received event network-vif-plugged-ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 967.109873] env[68285]: DEBUG oslo_concurrency.lockutils [req-2ff601b7-f491-490b-8540-3e303a2cec29 req-1d605e29-b20d-486f-b864-85be3e632e01 service nova] Acquiring lock "7bef3e2a-00ab-480a-aa8c-335635ee5d31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.110120] env[68285]: DEBUG oslo_concurrency.lockutils [req-2ff601b7-f491-490b-8540-3e303a2cec29 req-1d605e29-b20d-486f-b864-85be3e632e01 service nova] Lock "7bef3e2a-00ab-480a-aa8c-335635ee5d31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.110305] env[68285]: DEBUG oslo_concurrency.lockutils [req-2ff601b7-f491-490b-8540-3e303a2cec29 req-1d605e29-b20d-486f-b864-85be3e632e01 service nova] Lock "7bef3e2a-00ab-480a-aa8c-335635ee5d31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.110719] env[68285]: DEBUG nova.compute.manager [req-2ff601b7-f491-490b-8540-3e303a2cec29 req-1d605e29-b20d-486f-b864-85be3e632e01 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] No waiting events found dispatching network-vif-plugged-ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 967.110895] env[68285]: WARNING nova.compute.manager [req-2ff601b7-f491-490b-8540-3e303a2cec29 req-1d605e29-b20d-486f-b864-85be3e632e01 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Received unexpected event network-vif-plugged-ee14be75-4848-4471-9d06-29e7a06446fd for instance with vm_state building and task_state spawning. [ 967.229893] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891629, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.240593] env[68285]: DEBUG oslo_concurrency.lockutils [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.335281] env[68285]: DEBUG oslo_concurrency.lockutils [None req-63b1c613-91d6-4e9a-8df1-6db69ec5109c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "437a18da-8fe4-478e-82a0-3b1a9da47df8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.501s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.505671] env[68285]: DEBUG nova.network.neutron [None req-8ac46ad7-cbbb-400a-86cb-3793c4f59f91 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Updating instance_info_cache with network_info: [{"id": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "address": "fa:16:3e:4e:1c:f7", "network": {"id": "e5bd99f4-35a7-4389-ba74-8ae60f642ef1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-161057880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee59d7c8bf9d4e35b0c2e1861f375a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dda7e58-86", "ovs_interfaceid": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.541221] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891630, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.571652] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquiring lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.571808] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquired lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 967.571975] env[68285]: DEBUG nova.network.neutron [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 967.601756] env[68285]: DEBUG oslo_vmware.api [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891631, 'name': PowerOffVM_Task, 'duration_secs': 0.255274} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.601756] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 967.602525] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 967.602904] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bcfc2cc2-f1ae-4f89-a4fb-43e2897fafda {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.668941] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 967.669222] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 967.669420] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Deleting the datastore file [datastore1] 14285f6e-10a4-4077-a666-3c8d0cc1b87c {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 967.669716] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-188d74da-2808-4ecd-aafd-c5123ce54946 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.677380] env[68285]: DEBUG oslo_vmware.api [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Waiting for the task: (returnval){ [ 967.677380] env[68285]: value = "task-2891633" [ 967.677380] env[68285]: _type = "Task" [ 967.677380] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.685703] env[68285]: DEBUG oslo_vmware.api [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891633, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.727025] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891629, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556387} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.729035] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8a848ec8-1ae0-4437-be4f-49219214d11f/8a848ec8-1ae0-4437-be4f-49219214d11f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 967.729035] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 967.729035] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8efbbef-c9a3-4cd6-bff2-7852a872b1c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.734845] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 967.734845] env[68285]: value = "task-2891634" [ 967.734845] env[68285]: _type = "Task" [ 967.734845] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.745873] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891634, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.815891] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6a10222a-a281-45ab-b581-a05fba55fcf6 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.817864] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.511s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.819922] env[68285]: INFO nova.compute.claims [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 968.011202] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8ac46ad7-cbbb-400a-86cb-3793c4f59f91 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Releasing lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.011459] env[68285]: DEBUG nova.compute.manager [None req-8ac46ad7-cbbb-400a-86cb-3793c4f59f91 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Inject network info {{(pid=68285) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 968.011712] env[68285]: DEBUG nova.compute.manager [None req-8ac46ad7-cbbb-400a-86cb-3793c4f59f91 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] network_info to inject: |[{"id": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "address": "fa:16:3e:4e:1c:f7", "network": {"id": "e5bd99f4-35a7-4389-ba74-8ae60f642ef1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-161057880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee59d7c8bf9d4e35b0c2e1861f375a1e", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dda7e58-86", "ovs_interfaceid": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 968.016665] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8ac46ad7-cbbb-400a-86cb-3793c4f59f91 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Reconfiguring VM instance to set the machine id {{(pid=68285) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 968.017096] env[68285]: DEBUG oslo_concurrency.lockutils [req-2ae319c4-5688-4e3c-a5ee-fc094445fbfd req-7f934fb5-b53b-42c3-a597-eb79244b2e15 service nova] Acquired lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.017301] env[68285]: DEBUG nova.network.neutron [req-2ae319c4-5688-4e3c-a5ee-fc094445fbfd req-7f934fb5-b53b-42c3-a597-eb79244b2e15 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Refreshing network info cache for port 4dda7e58-86f1-4d41-ad9e-0f08c3df3241 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 968.018927] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e96e131c-acad-4698-9fe7-0471155f5f0e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.043140] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891630, 'name': PowerOnVM_Task, 'duration_secs': 0.877376} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.045028] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.045341] env[68285]: DEBUG nova.compute.manager [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 968.046192] env[68285]: DEBUG oslo_vmware.api [None req-8ac46ad7-cbbb-400a-86cb-3793c4f59f91 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Waiting for the task: (returnval){ [ 968.046192] env[68285]: value = "task-2891635" [ 968.046192] env[68285]: _type = "Task" [ 968.046192] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.046410] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de53a4f0-92c8-43c8-b4ef-a29986e62aeb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.063306] env[68285]: DEBUG oslo_vmware.api [None req-8ac46ad7-cbbb-400a-86cb-3793c4f59f91 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891635, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.113041] env[68285]: DEBUG nova.network.neutron [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 968.192728] env[68285]: DEBUG oslo_vmware.api [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Task: {'id': task-2891633, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241171} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.193081] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 968.193319] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 968.193538] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 968.193776] env[68285]: INFO nova.compute.manager [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Took 1.13 seconds to destroy the instance on the hypervisor. [ 968.194040] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 968.194318] env[68285]: DEBUG nova.compute.manager [-] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 968.194417] env[68285]: DEBUG nova.network.neutron [-] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 968.229768] env[68285]: DEBUG nova.network.neutron [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Updating instance_info_cache with network_info: [{"id": "ee14be75-4848-4471-9d06-29e7a06446fd", "address": "fa:16:3e:16:c8:ee", "network": {"id": "dab36320-0163-4a17-8e23-ccb4a6db67a2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-677118867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "31396f8bc32b48e883ef6bd7c38ad3c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee14be75-48", "ovs_interfaceid": "ee14be75-4848-4471-9d06-29e7a06446fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.243713] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891634, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097672} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.244041] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 968.249351] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a726961-46bc-4afa-a159-0287063566a4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.272445] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 8a848ec8-1ae0-4437-be4f-49219214d11f/8a848ec8-1ae0-4437-be4f-49219214d11f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 968.273144] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf9742ce-dd9b-4a23-bbca-b46903fefb89 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.301298] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 968.301298] env[68285]: value = "task-2891636" [ 968.301298] env[68285]: _type = "Task" [ 968.301298] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.310165] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891636, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.336269] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.336269] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.336269] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.336269] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.336269] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.339942] env[68285]: INFO nova.compute.manager [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Terminating instance [ 968.435788] env[68285]: DEBUG nova.network.neutron [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Port 567381f7-5f78-4920-beb9-db0ef3479244 binding to destination host cpu-1 is already ACTIVE {{(pid=68285) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 968.436047] env[68285]: DEBUG oslo_concurrency.lockutils [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.436577] env[68285]: DEBUG 
oslo_concurrency.lockutils [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.436577] env[68285]: DEBUG nova.network.neutron [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 968.461960] env[68285]: DEBUG nova.objects.instance [None req-54ac95ea-b80b-483b-933f-4813a0c2e401 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Lazy-loading 'flavor' on Instance uuid 87582063-50f9-4518-ad2d-915c9cd49b19 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.559714] env[68285]: DEBUG oslo_vmware.api [None req-8ac46ad7-cbbb-400a-86cb-3793c4f59f91 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891635, 'name': ReconfigVM_Task, 'duration_secs': 0.173964} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.560012] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8ac46ad7-cbbb-400a-86cb-3793c4f59f91 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Reconfigured VM instance to set the machine id {{(pid=68285) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 968.566977] env[68285]: INFO nova.compute.manager [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] bringing vm to original state: 'stopped' [ 968.616420] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.616658] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.733826] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Releasing lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.734165] env[68285]: DEBUG nova.compute.manager [None 
req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Instance network_info: |[{"id": "ee14be75-4848-4471-9d06-29e7a06446fd", "address": "fa:16:3e:16:c8:ee", "network": {"id": "dab36320-0163-4a17-8e23-ccb4a6db67a2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-677118867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "31396f8bc32b48e883ef6bd7c38ad3c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee14be75-48", "ovs_interfaceid": "ee14be75-4848-4471-9d06-29e7a06446fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 968.734626] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:c8:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c405e9f-a6c8-4308-acac-071654efe18e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ee14be75-4848-4471-9d06-29e7a06446fd', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 968.742195] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Creating folder: Project (31396f8bc32b48e883ef6bd7c38ad3c0). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 968.742831] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0b60f31-95bd-496a-8548-a1661a38a23b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.755042] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Created folder: Project (31396f8bc32b48e883ef6bd7c38ad3c0) in parent group-v580775. [ 968.755247] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Creating folder: Instances. Parent ref: group-v580933. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 968.755485] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe12bf74-3544-44d8-9d07-9fa29e0397a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.766084] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Created folder: Instances in parent group-v580933. [ 968.766355] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 968.766565] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 968.766801] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04af45ce-a5dd-4e6e-b4bf-ae1dd7017f90 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.781879] env[68285]: DEBUG nova.network.neutron [req-2ae319c4-5688-4e3c-a5ee-fc094445fbfd req-7f934fb5-b53b-42c3-a597-eb79244b2e15 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Updated VIF entry in instance network info cache for port 4dda7e58-86f1-4d41-ad9e-0f08c3df3241. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 968.782283] env[68285]: DEBUG nova.network.neutron [req-2ae319c4-5688-4e3c-a5ee-fc094445fbfd req-7f934fb5-b53b-42c3-a597-eb79244b2e15 service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Updating instance_info_cache with network_info: [{"id": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "address": "fa:16:3e:4e:1c:f7", "network": {"id": "e5bd99f4-35a7-4389-ba74-8ae60f642ef1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-161057880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee59d7c8bf9d4e35b0c2e1861f375a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dda7e58-86", "ovs_interfaceid": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.787368] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 968.787368] env[68285]: value = "task-2891639" [ 968.787368] env[68285]: _type = "Task" [ 968.787368] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.796388] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891639, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.810771] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891636, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.845032] env[68285]: DEBUG nova.compute.manager [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 968.845190] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.846650] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ac907c-4b0c-4ba0-b0c1-0dcc6a30ee2d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.856229] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.856591] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28077e42-916e-4b4b-a143-cc58903e3b43 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.864069] env[68285]: DEBUG oslo_vmware.api [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 968.864069] env[68285]: value = "task-2891640" [ 968.864069] env[68285]: _type = "Task" [ 968.864069] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.876827] env[68285]: DEBUG oslo_vmware.api [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891640, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.969055] env[68285]: DEBUG oslo_concurrency.lockutils [None req-54ac95ea-b80b-483b-933f-4813a0c2e401 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquiring lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.042195] env[68285]: DEBUG nova.network.neutron [-] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.098594] env[68285]: DEBUG nova.compute.manager [req-2d75feb6-8672-4f8a-99b1-599b81c1f61c req-19c1aa54-4894-4ffc-81f0-d5e703fa03a6 service nova] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Received event network-vif-deleted-45eef779-1d54-4b31-a125-d7c2d144d337 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 969.119898] env[68285]: DEBUG nova.compute.utils [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 969.193227] env[68285]: DEBUG nova.network.neutron [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance_info_cache with network_info: [{"id": "567381f7-5f78-4920-beb9-db0ef3479244", "address": "fa:16:3e:68:fd:5e", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap567381f7-5f", "ovs_interfaceid": "567381f7-5f78-4920-beb9-db0ef3479244", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.284888] env[68285]: DEBUG oslo_concurrency.lockutils [req-2ae319c4-5688-4e3c-a5ee-fc094445fbfd req-7f934fb5-b53b-42c3-a597-eb79244b2e15 service nova] Releasing lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.285323] env[68285]: DEBUG oslo_concurrency.lockutils [None req-54ac95ea-b80b-483b-933f-4813a0c2e401 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquired lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" 
{{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.299012] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891639, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.311985] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891636, 'name': ReconfigVM_Task, 'duration_secs': 0.882999} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.311985] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 8a848ec8-1ae0-4437-be4f-49219214d11f/8a848ec8-1ae0-4437-be4f-49219214d11f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 969.314650] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9cdae463-d4e2-4b31-a22d-19cb19e8015c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.321641] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 969.321641] env[68285]: value = "task-2891641" [ 969.321641] env[68285]: _type = "Task" [ 969.321641] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.331181] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891641, 'name': Rename_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.351269] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700caf32-560f-4d8b-b9f4-3eb8cfe760e1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.358725] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9f87c9-ab70-43cb-ac3d-830b7f1551d8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.397890] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3eece24-75ec-4d01-b9b4-7910f1a0c73e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.401642] env[68285]: DEBUG nova.compute.manager [req-93899f1d-ea8c-4dd3-8d11-5dcb4a28fbfc req-0451cfa6-00fc-4c7e-a3b5-d6304eccadca service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Received event network-changed-ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 969.401826] env[68285]: DEBUG nova.compute.manager [req-93899f1d-ea8c-4dd3-8d11-5dcb4a28fbfc req-0451cfa6-00fc-4c7e-a3b5-d6304eccadca service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Refreshing instance network info cache due to event network-changed-ee14be75-4848-4471-9d06-29e7a06446fd. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 969.402080] env[68285]: DEBUG oslo_concurrency.lockutils [req-93899f1d-ea8c-4dd3-8d11-5dcb4a28fbfc req-0451cfa6-00fc-4c7e-a3b5-d6304eccadca service nova] Acquiring lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.402234] env[68285]: DEBUG oslo_concurrency.lockutils [req-93899f1d-ea8c-4dd3-8d11-5dcb4a28fbfc req-0451cfa6-00fc-4c7e-a3b5-d6304eccadca service nova] Acquired lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.402394] env[68285]: DEBUG nova.network.neutron [req-93899f1d-ea8c-4dd3-8d11-5dcb4a28fbfc req-0451cfa6-00fc-4c7e-a3b5-d6304eccadca service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Refreshing network info cache for port ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 969.410482] env[68285]: DEBUG oslo_vmware.api [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891640, 'name': PowerOffVM_Task, 'duration_secs': 0.222267} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.413187] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 969.413425] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 969.414554] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ccc5cdf-664d-45bb-ba3f-e6b6cb7c9d12 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.417388] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b99ffbc-19c2-465a-9b36-fccb66e5c0a6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.435485] env[68285]: DEBUG nova.compute.provider_tree [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.485451] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 969.485687] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.485880] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Deleting the datastore file [datastore2] b0f32ce2-92fd-4290-a2f4-e5658f775f4f {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.486164] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27fdcd03-276d-4dbb-9afb-1c4e0fa04640 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.494155] env[68285]: DEBUG oslo_vmware.api [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for the task: (returnval){ [ 969.494155] env[68285]: value = "task-2891643" [ 969.494155] env[68285]: _type = "Task" [ 969.494155] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.502755] env[68285]: DEBUG oslo_vmware.api [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891643, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.547401] env[68285]: INFO nova.compute.manager [-] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Took 1.35 seconds to deallocate network for instance. [ 969.575219] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "c690490f-9278-4595-8286-d4fd970bbc39" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.575530] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "c690490f-9278-4595-8286-d4fd970bbc39" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.575782] env[68285]: DEBUG nova.compute.manager [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 969.577144] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606db398-fa4c-4343-bf7d-3f08be73da72 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.585914] env[68285]: DEBUG nova.compute.manager [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68285) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 969.622534] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.006s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.690933] env[68285]: DEBUG nova.network.neutron [None req-54ac95ea-b80b-483b-933f-4813a0c2e401 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 969.698444] env[68285]: DEBUG oslo_concurrency.lockutils [None 
req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Releasing lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.800515] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891639, 'name': CreateVM_Task, 'duration_secs': 0.649982} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.800692] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 969.801405] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.801574] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.801898] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 969.802170] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0c04f9b-1af5-41cd-808d-29d3c4966caa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.808052] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 969.808052] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52a84449-b6d8-5eb6-32d5-16f9e7a9bae9" [ 969.808052] env[68285]: _type = "Task" [ 969.808052] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.817171] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a84449-b6d8-5eb6-32d5-16f9e7a9bae9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.831243] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891641, 'name': Rename_Task, 'duration_secs': 0.18061} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.831533] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 969.831787] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a896d45d-e03f-41cc-885b-347418116aa2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.839195] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 969.839195] env[68285]: value = "task-2891644" [ 969.839195] env[68285]: _type = "Task" [ 969.839195] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.847447] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891644, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.938701] env[68285]: DEBUG nova.scheduler.client.report [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 970.005184] env[68285]: DEBUG oslo_vmware.api [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891643, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.056133] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.090305] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 970.091069] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eda09115-3a0e-4622-aed6-768be1878721 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.098471] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 970.098471] env[68285]: value = "task-2891645" [ 970.098471] env[68285]: _type = "Task" [ 970.098471] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.106609] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891645, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.133952] env[68285]: DEBUG nova.network.neutron [req-93899f1d-ea8c-4dd3-8d11-5dcb4a28fbfc req-0451cfa6-00fc-4c7e-a3b5-d6304eccadca service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Updated VIF entry in instance network info cache for port ee14be75-4848-4471-9d06-29e7a06446fd. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 970.134443] env[68285]: DEBUG nova.network.neutron [req-93899f1d-ea8c-4dd3-8d11-5dcb4a28fbfc req-0451cfa6-00fc-4c7e-a3b5-d6304eccadca service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Updating instance_info_cache with network_info: [{"id": "ee14be75-4848-4471-9d06-29e7a06446fd", "address": "fa:16:3e:16:c8:ee", "network": {"id": "dab36320-0163-4a17-8e23-ccb4a6db67a2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-677118867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "31396f8bc32b48e883ef6bd7c38ad3c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee14be75-48", "ovs_interfaceid": "ee14be75-4848-4471-9d06-29e7a06446fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.201463] env[68285]: DEBUG nova.compute.manager [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68285) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 970.202851] env[68285]: DEBUG oslo_concurrency.lockutils [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.321720] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a84449-b6d8-5eb6-32d5-16f9e7a9bae9, 'name': SearchDatastore_Task, 'duration_secs': 0.013527} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.322169] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.322482] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 970.322786] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.323094] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.323308] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 970.323579] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89eabca2-2ad1-43ff-8664-7e51b4ba253a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.345116] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 970.345852] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 970.346829] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89bbaf9d-4be0-468a-a03f-79520ca5a77b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.352618] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891644, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.356122] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 970.356122] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]522b98f8-8752-fff5-32b3-c53a366a087d" [ 970.356122] env[68285]: _type = "Task" [ 970.356122] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.365127] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522b98f8-8752-fff5-32b3-c53a366a087d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.453026] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.453026] env[68285]: DEBUG nova.compute.manager [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 970.456116] env[68285]: DEBUG oslo_concurrency.lockutils [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.779s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.457375] env[68285]: DEBUG nova.objects.instance [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lazy-loading 'resources' on Instance uuid d1b5abfa-fd38-4d17-b75f-5036af841d24 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.461972] env[68285]: DEBUG nova.network.neutron [None req-54ac95ea-b80b-483b-933f-4813a0c2e401 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Updating instance_info_cache with network_info: [{"id": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "address": "fa:16:3e:4e:1c:f7", "network": {"id": "e5bd99f4-35a7-4389-ba74-8ae60f642ef1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-161057880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee59d7c8bf9d4e35b0c2e1861f375a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dda7e58-86", "ovs_interfaceid": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.505386] env[68285]: DEBUG oslo_vmware.api [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Task: {'id': task-2891643, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.575058} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.505871] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 970.506171] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 970.506409] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 970.506643] env[68285]: INFO nova.compute.manager [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Took 1.66 seconds to destroy the instance on the hypervisor. [ 970.507025] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 970.507287] env[68285]: DEBUG nova.compute.manager [-] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 970.507434] env[68285]: DEBUG nova.network.neutron [-] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 970.608369] env[68285]: DEBUG oslo_vmware.api [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891645, 'name': PowerOffVM_Task, 'duration_secs': 0.357411} completed successfully.
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.608664] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 970.608914] env[68285]: DEBUG nova.compute.manager [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 970.609691] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a10e2b-ef7f-45bc-bdde-642fd4d75f30 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.637636] env[68285]: DEBUG oslo_concurrency.lockutils [req-93899f1d-ea8c-4dd3-8d11-5dcb4a28fbfc req-0451cfa6-00fc-4c7e-a3b5-d6304eccadca service nova] Releasing lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.701498] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.701845] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.702155] env[68285]: INFO nova.compute.manager [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Attaching volume 729629cd-cafe-4baf-9474-cba7083d3a6a to /dev/sdb [ 970.736279] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efef5fa8-35dc-40fe-80a2-a4f00433ad3c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.743815] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36557f39-93c8-4f90-8505-91d31e82f7ac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.765043] env[68285]: DEBUG nova.virt.block_device [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Updating existing volume attachment record: 46a43703-3a84-4234-810f-1a93f644d16d {{(pid=68285) _volume_attach
/opt/stack/nova/nova/virt/block_device.py:666}} [ 970.848828] env[68285]: DEBUG oslo_vmware.api [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891644, 'name': PowerOnVM_Task, 'duration_secs': 0.513957} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.849120] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 970.849331] env[68285]: INFO nova.compute.manager [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Took 8.88 seconds to spawn the instance on the hypervisor. [ 970.849509] env[68285]: DEBUG nova.compute.manager [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 970.850296] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc278bd-706f-44fc-8c9f-f9127fc0fb33 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.866795] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522b98f8-8752-fff5-32b3-c53a366a087d, 'name': SearchDatastore_Task, 'duration_secs': 0.022937} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.867564] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7c4f400-b000-441d-90d1-6a458496cc99 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.872816] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 970.872816] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5290c6a8-9645-39a4-51f1-a5817e085b16" [ 970.872816] env[68285]: _type = "Task" [ 970.872816] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.880189] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5290c6a8-9645-39a4-51f1-a5817e085b16, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.965404] env[68285]: DEBUG nova.compute.utils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 970.967043] env[68285]: DEBUG oslo_concurrency.lockutils [None req-54ac95ea-b80b-483b-933f-4813a0c2e401 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Releasing lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.967137] env[68285]: DEBUG nova.compute.manager [None req-54ac95ea-b80b-483b-933f-4813a0c2e401 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Inject network info {{(pid=68285) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 970.967385] env[68285]: DEBUG nova.compute.manager [None req-54ac95ea-b80b-483b-933f-4813a0c2e401 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] network_info to inject: |[{"id": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "address": "fa:16:3e:4e:1c:f7", "network": {"id": "e5bd99f4-35a7-4389-ba74-8ae60f642ef1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-161057880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee59d7c8bf9d4e35b0c2e1861f375a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dda7e58-86", "ovs_interfaceid": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 970.972927] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-54ac95ea-b80b-483b-933f-4813a0c2e401 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Reconfiguring VM instance to set the machine id {{(pid=68285) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 970.973539] env[68285]: DEBUG nova.compute.manager [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 970.973723] env[68285]: DEBUG nova.network.neutron [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 970.978013] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae7aed4a-de53-4774-b668-af00f69d2886 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.997203] env[68285]: DEBUG oslo_vmware.api [None req-54ac95ea-b80b-483b-933f-4813a0c2e401 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Waiting for the task: (returnval){ [ 970.997203] env[68285]: value = "task-2891649" [ 970.997203] env[68285]: _type = "Task" [ 970.997203] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.008529] env[68285]: DEBUG oslo_vmware.api [None req-54ac95ea-b80b-483b-933f-4813a0c2e401 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891649, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.041253] env[68285]: DEBUG nova.policy [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64faebf5ce1549fe938f12248656d8d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2727048b316143c7bfa2aef4f9b264f2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 971.123934] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "c690490f-9278-4595-8286-d4fd970bbc39" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 1.548s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.278483] env[68285]: DEBUG nova.network.neutron [-] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.365564] env[68285]: INFO nova.compute.manager [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Took 50.44 seconds to build instance.
[ 971.386883] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5290c6a8-9645-39a4-51f1-a5817e085b16, 'name': SearchDatastore_Task, 'duration_secs': 0.023648} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.387802] env[68285]: DEBUG nova.network.neutron [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Successfully created port: 0b7efc8c-8a7b-4401-86cd-f76e8836c2c4 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 971.389887] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.390154] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 7bef3e2a-00ab-480a-aa8c-335635ee5d31/7bef3e2a-00ab-480a-aa8c-335635ee5d31.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 971.390417] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5082312-c883-4877-a24b-c900d2a4f1dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.399245] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 971.399245] env[68285]: value = "task-2891650" [ 971.399245] env[68285]: _type = "Task" [ 971.399245] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.407520] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891650, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.474773] env[68285]: DEBUG nova.compute.manager [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 971.507255] env[68285]: DEBUG oslo_vmware.api [None req-54ac95ea-b80b-483b-933f-4813a0c2e401 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891649, 'name': ReconfigVM_Task, 'duration_secs': 0.226706} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.511044] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-54ac95ea-b80b-483b-933f-4813a0c2e401 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Reconfigured VM instance to set the machine id {{(pid=68285) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 971.515548] env[68285]: DEBUG nova.compute.manager [req-587684d1-e209-49cd-a00a-394bd393af01 req-dfcf7135-eb44-43bf-95bd-fad7ece1350a service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Received event network-changed-4dda7e58-86f1-4d41-ad9e-0f08c3df3241 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 971.515843] env[68285]: DEBUG nova.compute.manager [req-587684d1-e209-49cd-a00a-394bd393af01 req-dfcf7135-eb44-43bf-95bd-fad7ece1350a service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Refreshing instance network info cache due to event network-changed-4dda7e58-86f1-4d41-ad9e-0f08c3df3241. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 971.515988] env[68285]: DEBUG oslo_concurrency.lockutils [req-587684d1-e209-49cd-a00a-394bd393af01 req-dfcf7135-eb44-43bf-95bd-fad7ece1350a service nova] Acquiring lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.516152] env[68285]: DEBUG oslo_concurrency.lockutils [req-587684d1-e209-49cd-a00a-394bd393af01 req-dfcf7135-eb44-43bf-95bd-fad7ece1350a service nova] Acquired lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.516344] env[68285]: DEBUG nova.network.neutron [req-587684d1-e209-49cd-a00a-394bd393af01 req-dfcf7135-eb44-43bf-95bd-fad7ece1350a service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Refreshing network info cache for port 4dda7e58-86f1-4d41-ad9e-0f08c3df3241 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 971.632404] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75409a8d-8682-4e61-b52b-da4c664255f1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.635310] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.639699] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4172c2c5-fff7-43fe-87a7-3f69d593b614 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.673544] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquiring lock "87582063-50f9-4518-ad2d-915c9cd49b19" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.673814] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Lock "87582063-50f9-4518-ad2d-915c9cd49b19" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.674010] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquiring lock "87582063-50f9-4518-ad2d-915c9cd49b19-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.674258] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Lock "87582063-50f9-4518-ad2d-915c9cd49b19-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.674447] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Lock "87582063-50f9-4518-ad2d-915c9cd49b19-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.676523] env[68285]: INFO nova.compute.manager [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Terminating instance [ 971.678331] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba41bd84-8555-4bb1-a30b-f8b6e57b72ca {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.687027] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4a952d-c16c-42ba-98cf-8fc15ab08272 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.702447] env[68285]: DEBUG nova.compute.provider_tree [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed in ProviderTree for provider:
7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.781982] env[68285]: INFO nova.compute.manager [-] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Took 1.27 seconds to deallocate network for instance. [ 971.868415] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f57830a0-7f58-45e4-ac98-eea65bbf85dd tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Lock "8a848ec8-1ae0-4437-be4f-49219214d11f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 52.865s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.910044] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891650, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.183427] env[68285]: DEBUG nova.compute.manager [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 972.183637] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 972.184537] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea81238a-5e6b-4816-95e8-f17443a738f8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.192636] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 972.192891] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb4b99fc-e942-4a95-b9da-4a7e1c1d3dc0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.199569] env[68285]: DEBUG oslo_vmware.api [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Waiting for the task: (returnval){ [ 972.199569] env[68285]: value = "task-2891651" [ 972.199569] env[68285]: _type = "Task" [ 972.199569] env[68285]: } to complete.
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.206641] env[68285]: DEBUG nova.scheduler.client.report [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 972.216511] env[68285]: DEBUG oslo_vmware.api [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891651, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.288875] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.409830] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891650, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.73592} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.410204] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 7bef3e2a-00ab-480a-aa8c-335635ee5d31/7bef3e2a-00ab-480a-aa8c-335635ee5d31.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 972.410376] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 972.410589] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee24afed-d6f8-4299-a748-26bd8e5ca64b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.417068] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 972.417068] env[68285]: value = "task-2891652" [ 972.417068] env[68285]: _type = "Task" [ 972.417068] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.426093] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891652, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.472721] env[68285]: DEBUG nova.network.neutron [req-587684d1-e209-49cd-a00a-394bd393af01 req-dfcf7135-eb44-43bf-95bd-fad7ece1350a service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Updated VIF entry in instance network info cache for port 4dda7e58-86f1-4d41-ad9e-0f08c3df3241. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 972.473117] env[68285]: DEBUG nova.network.neutron [req-587684d1-e209-49cd-a00a-394bd393af01 req-dfcf7135-eb44-43bf-95bd-fad7ece1350a service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Updating instance_info_cache with network_info: [{"id": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "address": "fa:16:3e:4e:1c:f7", "network": {"id": "e5bd99f4-35a7-4389-ba74-8ae60f642ef1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-161057880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee59d7c8bf9d4e35b0c2e1861f375a1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96fdbb91-eb49-4dbf-b234-5b38503d7589", "external-id": "nsx-vlan-transportzone-392", "segmentation_id": 392, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dda7e58-86", "ovs_interfaceid": "4dda7e58-86f1-4d41-ad9e-0f08c3df3241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.488054] env[68285]: DEBUG nova.compute.manager [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 972.516600] env[68285]: DEBUG nova.virt.hardware [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 972.516855] env[68285]: DEBUG nova.virt.hardware [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 972.517043] env[68285]: DEBUG nova.virt.hardware [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 972.517229] env[68285]: DEBUG nova.virt.hardware [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 972.518845] env[68285]: DEBUG nova.virt.hardware [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 972.518845] env[68285]: DEBUG nova.virt.hardware [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 972.518845] env[68285]: DEBUG nova.virt.hardware [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 972.518845] env[68285]: DEBUG nova.virt.hardware [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 972.518845] 
env[68285]: DEBUG nova.virt.hardware [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 972.518845] env[68285]: DEBUG nova.virt.hardware [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 972.518845] env[68285]: DEBUG nova.virt.hardware [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 972.521876] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d23eae3-19aa-42ae-b051-fd538153aba7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.527335] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c547e9-87df-470c-b04a-82b47c53598e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.571746] env[68285]: INFO nova.compute.manager [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Rebuilding instance [ 972.616976] env[68285]: DEBUG nova.compute.manager [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 972.617920] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d39463d-8c25-4908-a206-732ad4dafddf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.716215] env[68285]: DEBUG oslo_vmware.api [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891651, 'name': PowerOffVM_Task, 'duration_secs': 0.196493} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.716665] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 972.716898] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 972.718017] env[68285]: DEBUG oslo_concurrency.lockutils [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.262s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.720888] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-046714e4-9fdd-4000-83f1-8aa5426b6933 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.723828] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.503s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.724102] env[68285]: DEBUG nova.objects.instance [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 972.744765] env[68285]: INFO nova.scheduler.client.report [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleted allocations for instance d1b5abfa-fd38-4d17-b75f-5036af841d24 [ 972.791419] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 972.791419] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 972.791419] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 
tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Deleting the datastore file [datastore1] 87582063-50f9-4518-ad2d-915c9cd49b19 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 972.792122] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2568379-4468-43eb-a8f2-ebfbcac14453 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.801329] env[68285]: DEBUG oslo_vmware.api [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Waiting for the task: (returnval){ [ 972.801329] env[68285]: value = "task-2891654" [ 972.801329] env[68285]: _type = "Task" [ 972.801329] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.811043] env[68285]: DEBUG oslo_vmware.api [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891654, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.879351] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "c690490f-9278-4595-8286-d4fd970bbc39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.879729] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "c690490f-9278-4595-8286-d4fd970bbc39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.879977] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "c690490f-9278-4595-8286-d4fd970bbc39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.880220] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "c690490f-9278-4595-8286-d4fd970bbc39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.880410] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "c690490f-9278-4595-8286-d4fd970bbc39-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.882673] env[68285]: INFO nova.compute.manager [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Terminating instance [ 972.930362] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891652, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072568} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.930665] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 972.931501] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b556c7-3ffc-44c9-8f46-e88989e861f9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.957896] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 7bef3e2a-00ab-480a-aa8c-335635ee5d31/7bef3e2a-00ab-480a-aa8c-335635ee5d31.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 972.958786] env[68285]: DEBUG nova.network.neutron [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Successfully updated port: 0b7efc8c-8a7b-4401-86cd-f76e8836c2c4 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 972.960508] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecfa02be-e1ee-460a-b341-a30fd31644f6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.978776] env[68285]: DEBUG oslo_concurrency.lockutils [req-587684d1-e209-49cd-a00a-394bd393af01 req-dfcf7135-eb44-43bf-95bd-fad7ece1350a service nova] Releasing lock "refresh_cache-87582063-50f9-4518-ad2d-915c9cd49b19" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.979042] env[68285]: DEBUG nova.compute.manager [req-587684d1-e209-49cd-a00a-394bd393af01 req-dfcf7135-eb44-43bf-95bd-fad7ece1350a service nova] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Received event network-vif-deleted-c3761ed0-eacf-4744-a549-4868f00f2bb5 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 972.984336] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 
tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 972.984336] env[68285]: value = "task-2891655" [ 972.984336] env[68285]: _type = "Task" [ 972.984336] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.994901] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891655, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.253636] env[68285]: DEBUG oslo_concurrency.lockutils [None req-837ec924-abf1-40f2-a395-a98346b4fe67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "d1b5abfa-fd38-4d17-b75f-5036af841d24" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.094s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.311643] env[68285]: DEBUG oslo_vmware.api [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Task: {'id': task-2891654, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268511} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.312371] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 973.312371] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 973.312371] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 973.312608] env[68285]: INFO nova.compute.manager [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Took 1.13 seconds to destroy the instance on the hypervisor. [ 973.312807] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 973.313018] env[68285]: DEBUG nova.compute.manager [-] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 973.313156] env[68285]: DEBUG nova.network.neutron [-] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 973.386879] env[68285]: DEBUG nova.compute.manager [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 973.387172] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.388260] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00608199-2109-43d8-8b14-8cae66fd5023 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.397023] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.398038] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-688ed072-dd7a-4f81-a0c6-be0517f32a6d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.472719] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.472719] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.472719] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleting the datastore file [datastore1] c690490f-9278-4595-8286-d4fd970bbc39 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.473223] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3d8e721-295d-4ece-ab36-1123d79d98bd {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.478082] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "refresh_cache-f1b8808d-c3a1-4be6-b6ec-ed441291e8f2" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.478228] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "refresh_cache-f1b8808d-c3a1-4be6-b6ec-ed441291e8f2" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 973.478419] env[68285]: DEBUG nova.network.neutron [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 973.482687] env[68285]: DEBUG oslo_vmware.api [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 973.482687] env[68285]: value = "task-2891658" [ 973.482687] env[68285]: _type = "Task" [ 973.482687] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.496752] env[68285]: DEBUG oslo_vmware.api [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891658, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.501982] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891655, 'name': ReconfigVM_Task, 'duration_secs': 0.384524} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.502251] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 7bef3e2a-00ab-480a-aa8c-335635ee5d31/7bef3e2a-00ab-480a-aa8c-335635ee5d31.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.502905] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47df8728-7a8a-46c6-a58c-dfaaf6ee7071 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.509691] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 973.509691] env[68285]: value = "task-2891659" [ 973.509691] env[68285]: _type = "Task" [ 973.509691] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.519329] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891659, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.538242] env[68285]: DEBUG nova.compute.manager [req-44302239-cb36-4021-be4e-117f02cdd723 req-ace4e226-cc79-4599-922d-585d6fa6283f service nova] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Received event network-vif-plugged-0b7efc8c-8a7b-4401-86cd-f76e8836c2c4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 973.538242] env[68285]: DEBUG oslo_concurrency.lockutils [req-44302239-cb36-4021-be4e-117f02cdd723 req-ace4e226-cc79-4599-922d-585d6fa6283f service nova] Acquiring lock "f1b8808d-c3a1-4be6-b6ec-ed441291e8f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.538242] env[68285]: DEBUG oslo_concurrency.lockutils [req-44302239-cb36-4021-be4e-117f02cdd723 req-ace4e226-cc79-4599-922d-585d6fa6283f service nova] Lock "f1b8808d-c3a1-4be6-b6ec-ed441291e8f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.538242] env[68285]: DEBUG oslo_concurrency.lockutils [req-44302239-cb36-4021-be4e-117f02cdd723 req-ace4e226-cc79-4599-922d-585d6fa6283f service nova] Lock "f1b8808d-c3a1-4be6-b6ec-ed441291e8f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.538501] env[68285]: DEBUG nova.compute.manager [req-44302239-cb36-4021-be4e-117f02cdd723 req-ace4e226-cc79-4599-922d-585d6fa6283f service nova] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] No waiting events found dispatching 
network-vif-plugged-0b7efc8c-8a7b-4401-86cd-f76e8836c2c4 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 973.538501] env[68285]: WARNING nova.compute.manager [req-44302239-cb36-4021-be4e-117f02cdd723 req-ace4e226-cc79-4599-922d-585d6fa6283f service nova] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Received unexpected event network-vif-plugged-0b7efc8c-8a7b-4401-86cd-f76e8836c2c4 for instance with vm_state building and task_state spawning. [ 973.538681] env[68285]: DEBUG nova.compute.manager [req-44302239-cb36-4021-be4e-117f02cdd723 req-ace4e226-cc79-4599-922d-585d6fa6283f service nova] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Received event network-changed-0b7efc8c-8a7b-4401-86cd-f76e8836c2c4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 973.538864] env[68285]: DEBUG nova.compute.manager [req-44302239-cb36-4021-be4e-117f02cdd723 req-ace4e226-cc79-4599-922d-585d6fa6283f service nova] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Refreshing instance network info cache due to event network-changed-0b7efc8c-8a7b-4401-86cd-f76e8836c2c4. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 973.539224] env[68285]: DEBUG oslo_concurrency.lockutils [req-44302239-cb36-4021-be4e-117f02cdd723 req-ace4e226-cc79-4599-922d-585d6fa6283f service nova] Acquiring lock "refresh_cache-f1b8808d-c3a1-4be6-b6ec-ed441291e8f2" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.634673] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.634673] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-230c0a60-444e-41ff-94d4-d4df9e42f34c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.642099] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 973.642099] env[68285]: value = "task-2891660" [ 973.642099] env[68285]: _type = "Task" [ 973.642099] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.652621] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891660, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.737022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc567091-a231-4748-bd24-6dbfb5e4c40e tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.737022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.064s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.738897] env[68285]: INFO nova.compute.claims [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 973.997662] env[68285]: DEBUG oslo_vmware.api [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891658, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.349103} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.997939] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 973.999033] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 973.999033] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 973.999033] env[68285]: INFO nova.compute.manager [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Took 0.61 seconds to destroy the instance on the hypervisor. [ 973.999033] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 973.999297] env[68285]: DEBUG nova.compute.manager [-] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 973.999297] env[68285]: DEBUG nova.network.neutron [-] [instance: c690490f-9278-4595-8286-d4fd970bbc39] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 974.022965] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891659, 'name': Rename_Task, 'duration_secs': 0.2744} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.025174] env[68285]: DEBUG nova.network.neutron [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 974.027238] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 974.027754] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30c33daa-e5e8-415b-9822-9db2e311e41c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.035679] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 974.035679] env[68285]: value = "task-2891661" [ 974.035679] env[68285]: _type = "Task" [ 974.035679] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.047731] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891661, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.152401] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891660, 'name': PowerOffVM_Task, 'duration_secs': 0.173853} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.152401] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 974.152561] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 974.153312] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647140e0-ac4e-488b-a8e1-39d9df70c8d9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.163585] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 974.163585] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3dd5d0a4-a015-49e5-8713-85847235277f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.189913] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 974.192023] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 974.192023] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Deleting the datastore file [datastore1] 8a848ec8-1ae0-4437-be4f-49219214d11f {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 974.193942] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-045d4d97-9df1-44b2-80dc-7a87353f93ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.202455] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 974.202455] env[68285]: value = "task-2891663" [ 974.202455] env[68285]: _type = "Task" [ 974.202455] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.210883] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891663, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.271593] env[68285]: DEBUG nova.network.neutron [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Updating instance_info_cache with network_info: [{"id": "0b7efc8c-8a7b-4401-86cd-f76e8836c2c4", "address": "fa:16:3e:9c:c1:b9", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b7efc8c-8a", "ovs_interfaceid": "0b7efc8c-8a7b-4401-86cd-f76e8836c2c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.528491] env[68285]: DEBUG nova.network.neutron [-] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.546702] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891661, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.698100] env[68285]: INFO nova.compute.manager [None req-665ec42d-a47f-46fa-8ed6-b03e3016afb5 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Get console output [ 974.698572] env[68285]: WARNING nova.virt.vmwareapi.driver [None req-665ec42d-a47f-46fa-8ed6-b03e3016afb5 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] The console log is missing. 
Check your VSPC configuration [ 974.712469] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891663, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163995} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.712757] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.712947] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.713149] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.759035] env[68285]: DEBUG nova.network.neutron [-] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.774555] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "refresh_cache-f1b8808d-c3a1-4be6-b6ec-ed441291e8f2" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 974.774874] env[68285]: DEBUG nova.compute.manager [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Instance network_info: |[{"id": "0b7efc8c-8a7b-4401-86cd-f76e8836c2c4", "address": "fa:16:3e:9c:c1:b9", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b7efc8c-8a", "ovs_interfaceid": "0b7efc8c-8a7b-4401-86cd-f76e8836c2c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 974.775885] env[68285]: DEBUG oslo_concurrency.lockutils [req-44302239-cb36-4021-be4e-117f02cdd723 req-ace4e226-cc79-4599-922d-585d6fa6283f service nova] Acquired lock "refresh_cache-f1b8808d-c3a1-4be6-b6ec-ed441291e8f2" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.776035] env[68285]: DEBUG nova.network.neutron [req-44302239-cb36-4021-be4e-117f02cdd723 req-ace4e226-cc79-4599-922d-585d6fa6283f service nova] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Refreshing network info cache for port 0b7efc8c-8a7b-4401-86cd-f76e8836c2c4 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 974.777261] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:c1:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '82dbbfe2-640b-433f-a8e9-1566bd40fb34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b7efc8c-8a7b-4401-86cd-f76e8836c2c4', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 974.784810] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 974.785834] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 974.786136] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87d22d4c-f4ac-4739-90b3-6bcd7c854861 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.811771] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 974.811771] env[68285]: value = "task-2891664" [ 974.811771] env[68285]: _type = "Task" [ 974.811771] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.825565] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891664, 'name': CreateVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.031945] env[68285]: INFO nova.compute.manager [-] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Took 1.72 seconds to deallocate network for instance. [ 975.049461] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891661, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.254654] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b98fac-4922-47ad-af85-7e8c8ceb4366 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.262124] env[68285]: INFO nova.compute.manager [-] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Took 1.26 seconds to deallocate network for instance. [ 975.275025] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d642f5-c657-482f-bbcc-2bcb6f6041b3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.325501] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Volume attach. Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 975.325747] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580937', 'volume_id': '729629cd-cafe-4baf-9474-cba7083d3a6a', 'name': 'volume-729629cd-cafe-4baf-9474-cba7083d3a6a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4f20336-9c29-4aac-8c0d-f577749cd7d7', 'attached_at': '', 'detached_at': '', 'volume_id': '729629cd-cafe-4baf-9474-cba7083d3a6a', 'serial': '729629cd-cafe-4baf-9474-cba7083d3a6a'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 975.329876] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bdfc687-cd1d-429c-bceb-5dd87632cf62 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.333755] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879eebd7-a9ca-4ee8-bc0f-14752a902a7b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.343030] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891664, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.356742] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7568503-2a9e-4d47-91af-464463080852 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.360561] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6803901-3092-42ea-885b-ba041a797fa2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.391885] env[68285]: DEBUG nova.compute.provider_tree [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.400845] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] volume-729629cd-cafe-4baf-9474-cba7083d3a6a/volume-729629cd-cafe-4baf-9474-cba7083d3a6a.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.403743] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8ebebfb-44c9-4b58-873d-7921cdaa37dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.422185] env[68285]: DEBUG oslo_vmware.api [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 975.422185] env[68285]: value = "task-2891665" [ 975.422185] env[68285]: _type = "Task" [ 975.422185] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.432368] env[68285]: DEBUG oslo_vmware.api [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891665, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.545413] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.551027] env[68285]: DEBUG oslo_vmware.api [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891661, 'name': PowerOnVM_Task, 'duration_secs': 1.272527} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.553494] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 975.553705] env[68285]: INFO nova.compute.manager [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Took 8.92 seconds to spawn the instance on the hypervisor. [ 975.553889] env[68285]: DEBUG nova.compute.manager [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 975.555103] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce1b8f6-80f4-47d8-8fa0-5d342f12d0ce {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.697103] env[68285]: DEBUG nova.network.neutron [req-44302239-cb36-4021-be4e-117f02cdd723 req-ace4e226-cc79-4599-922d-585d6fa6283f service nova] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Updated VIF entry in instance network info cache for port 0b7efc8c-8a7b-4401-86cd-f76e8836c2c4. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 975.697553] env[68285]: DEBUG nova.network.neutron [req-44302239-cb36-4021-be4e-117f02cdd723 req-ace4e226-cc79-4599-922d-585d6fa6283f service nova] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Updating instance_info_cache with network_info: [{"id": "0b7efc8c-8a7b-4401-86cd-f76e8836c2c4", "address": "fa:16:3e:9c:c1:b9", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b7efc8c-8a", "ovs_interfaceid": "0b7efc8c-8a7b-4401-86cd-f76e8836c2c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.756619] env[68285]: DEBUG nova.virt.hardware [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 975.756888] env[68285]: DEBUG nova.virt.hardware [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 975.757090] env[68285]: DEBUG nova.virt.hardware [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 975.757207] env[68285]: DEBUG nova.virt.hardware [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 975.757446] env[68285]: DEBUG nova.virt.hardware [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 975.757553] env[68285]: DEBUG nova.virt.hardware [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 975.757700] env[68285]: DEBUG nova.virt.hardware [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 975.757870] env[68285]: DEBUG nova.virt.hardware [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 975.758076] env[68285]: DEBUG nova.virt.hardware [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 975.758347] env[68285]: DEBUG nova.virt.hardware [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 
tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 975.758439] env[68285]: DEBUG nova.virt.hardware [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 975.759311] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa86483e-0a15-4b5d-bde0-9c9cad81b7c6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.767761] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13204acf-394c-49a7-8ad0-ae4769a900e4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.773890] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.782455] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 975.788212] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 975.788483] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 975.788707] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0bf02ea6-0b25-4f1f-b7ce-0f07584d4bd8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.807098] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 975.807098] env[68285]: value = "task-2891666" [ 975.807098] env[68285]: _type = "Task" [ 975.807098] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.817989] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891666, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.834922] env[68285]: DEBUG nova.compute.manager [req-0a0a3360-765d-4a2f-a276-2e89f451627c req-61693d25-c434-4b1f-a271-6deab234ff7a service nova] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Received event network-vif-deleted-4dda7e58-86f1-4d41-ad9e-0f08c3df3241 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 975.835400] env[68285]: DEBUG nova.compute.manager [req-0a0a3360-765d-4a2f-a276-2e89f451627c req-61693d25-c434-4b1f-a271-6deab234ff7a service nova] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Received event network-vif-deleted-38ebf797-d9b9-4c8d-8159-fdf3be92518b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 975.841463] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891664, 'name': CreateVM_Task, 'duration_secs': 0.623172} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.841665] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 975.842270] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.842439] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.842747] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 975.843015] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0499e717-2d30-410b-9efa-249ef41116a4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.849076] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 975.849076] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c07970-709e-7e66-b5c9-b5abcc987e87" [ 975.849076] env[68285]: _type = "Task" [ 975.849076] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.857237] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c07970-709e-7e66-b5c9-b5abcc987e87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.904824] env[68285]: DEBUG nova.scheduler.client.report [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 975.934466] env[68285]: DEBUG oslo_vmware.api [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891665, 'name': ReconfigVM_Task, 'duration_secs': 0.444063} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.934466] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Reconfigured VM instance instance-00000030 to attach disk [datastore2] volume-729629cd-cafe-4baf-9474-cba7083d3a6a/volume-729629cd-cafe-4baf-9474-cba7083d3a6a.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 975.938925] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-833724bc-9923-4ec8-a945-88588cef1b56 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.955306] env[68285]: DEBUG oslo_vmware.api [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 975.955306] env[68285]: value = "task-2891667" [ 975.955306] env[68285]: _type = "Task" [ 975.955306] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.963524] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Acquiring lock "e28d0927-17c2-4256-93d4-ef0cc2c9b92a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.963758] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Lock "e28d0927-17c2-4256-93d4-ef0cc2c9b92a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.963961] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Acquiring lock "e28d0927-17c2-4256-93d4-ef0cc2c9b92a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.964169] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Lock "e28d0927-17c2-4256-93d4-ef0cc2c9b92a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.964327] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Lock "e28d0927-17c2-4256-93d4-ef0cc2c9b92a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.965880] env[68285]: DEBUG oslo_vmware.api [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891667, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.966724] env[68285]: INFO nova.compute.manager [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Terminating instance [ 976.075706] env[68285]: INFO nova.compute.manager [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Took 50.71 seconds to build instance. 
[ 976.200768] env[68285]: DEBUG oslo_concurrency.lockutils [req-44302239-cb36-4021-be4e-117f02cdd723 req-ace4e226-cc79-4599-922d-585d6fa6283f service nova] Releasing lock "refresh_cache-f1b8808d-c3a1-4be6-b6ec-ed441291e8f2" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.318081] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891666, 'name': CreateVM_Task, 'duration_secs': 0.317417} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.318268] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 976.318714] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.362381] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c07970-709e-7e66-b5c9-b5abcc987e87, 'name': SearchDatastore_Task, 'duration_secs': 0.011993} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.362573] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.362807] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 976.363048] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.363201] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.363381] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 
tempest-ServerDiskConfigTestJSON-1313847802-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 976.363660] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.363957] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 976.364197] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f80f84b1-9b31-4585-9742-fe5e3fc88e71 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.366111] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-904a3c27-799b-4d01-98df-012db4cd6175 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.372281] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 976.372281] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527b4162-fdc0-c986-d5c4-e6a9f1179886" [ 976.372281] env[68285]: _type = "Task" [ 976.372281] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.376111] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 976.376330] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 976.377386] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd61d7b8-00ea-4488-9076-564e32042af9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.382296] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527b4162-fdc0-c986-d5c4-e6a9f1179886, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.386140] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 976.386140] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5248e72f-bbfa-d47d-d5db-a476210c0f9d" [ 976.386140] env[68285]: _type = "Task" [ 976.386140] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.392105] env[68285]: DEBUG oslo_vmware.rw_handles [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525418ea-d913-826b-0560-da9ae488a209/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 976.392809] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a046d8-7ff5-4fb4-a1c3-3a5c593f4b9c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.398049] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5248e72f-bbfa-d47d-d5db-a476210c0f9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.401260] env[68285]: DEBUG oslo_vmware.rw_handles [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525418ea-d913-826b-0560-da9ae488a209/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 976.401448] env[68285]: ERROR oslo_vmware.rw_handles [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525418ea-d913-826b-0560-da9ae488a209/disk-0.vmdk due to incomplete transfer. [ 976.401670] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f18faf17-f422-4e5b-bb94-a4887248fa02 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.407956] env[68285]: DEBUG oslo_vmware.rw_handles [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525418ea-d913-826b-0560-da9ae488a209/disk-0.vmdk. 
{{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 976.408210] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Uploaded image 81d960e4-b749-453c-ada9-72371e3f563e to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 976.410857] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 976.411700] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.675s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.412229] env[68285]: DEBUG nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 976.415168] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e29d0e31-5c40-4b6f-8a58-83a49a4b49e8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.416766] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.698s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.418493] env[68285]: INFO nova.compute.claims [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 976.425782] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 976.425782] env[68285]: value = "task-2891668" [ 976.425782] env[68285]: _type = "Task" [ 976.425782] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.434585] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891668, 'name': Destroy_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.464917] env[68285]: DEBUG oslo_vmware.api [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891667, 'name': ReconfigVM_Task, 'duration_secs': 0.16658} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.464917] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580937', 'volume_id': '729629cd-cafe-4baf-9474-cba7083d3a6a', 'name': 'volume-729629cd-cafe-4baf-9474-cba7083d3a6a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4f20336-9c29-4aac-8c0d-f577749cd7d7', 'attached_at': '', 'detached_at': '', 'volume_id': '729629cd-cafe-4baf-9474-cba7083d3a6a', 'serial': '729629cd-cafe-4baf-9474-cba7083d3a6a'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 976.470850] env[68285]: DEBUG nova.compute.manager [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 976.471090] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 976.471923] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8314ccb-0dc1-45cd-8e62-f8a63d49718c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.481537] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 976.481827] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd5afb3c-275b-43d6-97d4-d5c73393e794 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.487812] env[68285]: DEBUG oslo_vmware.api [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for the task: (returnval){ [ 976.487812] env[68285]: value = "task-2891669" [ 976.487812] env[68285]: _type = "Task" [ 976.487812] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.495896] env[68285]: DEBUG oslo_vmware.api [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2891669, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.518956] env[68285]: INFO nova.compute.manager [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Rescuing [ 976.519271] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquiring lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.519425] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquired lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.519612] env[68285]: DEBUG nova.network.neutron [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 976.578238] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5e02f258-9c62-43eb-a7f2-452cbf3818d2 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Lock "7bef3e2a-00ab-480a-aa8c-335635ee5d31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.025s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.882952] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527b4162-fdc0-c986-d5c4-e6a9f1179886, 'name': SearchDatastore_Task, 'duration_secs': 0.009311} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.883273] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.883498] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 976.883706] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.894626] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5248e72f-bbfa-d47d-d5db-a476210c0f9d, 'name': SearchDatastore_Task, 'duration_secs': 0.01083} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.895360] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-073c3b64-a610-4e8a-adad-1156d067e48c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.900589] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 976.900589] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b2eaa0-a901-1f31-fa54-4c0f66e1e502" [ 976.900589] env[68285]: _type = "Task" [ 976.900589] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.909044] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b2eaa0-a901-1f31-fa54-4c0f66e1e502, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.917161] env[68285]: DEBUG nova.compute.utils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 976.917683] env[68285]: DEBUG nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 976.918391] env[68285]: DEBUG nova.network.neutron [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 976.938145] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891668, 'name': Destroy_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.973027] env[68285]: DEBUG nova.policy [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '48401b0b09a2477db2a87df4835c70a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b390e5b4080a4984a3f935e9e6a0dd2a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 976.997873] env[68285]: DEBUG oslo_vmware.api [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2891669, 'name': PowerOffVM_Task, 'duration_secs': 0.350248} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.999890] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 977.000026] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 977.000488] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32210fe1-e248-4fc3-aa65-c8778a37a28e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.069917] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 977.070235] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 977.070456] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Deleting the datastore file [datastore1] e28d0927-17c2-4256-93d4-ef0cc2c9b92a {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 977.070746] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d290d5c-f724-4ed4-b310-dc451b861e61 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.077158] env[68285]: DEBUG oslo_vmware.api [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for the task: (returnval){ [ 977.077158] env[68285]: value = "task-2891671" [ 977.077158] env[68285]: _type = "Task" [ 977.077158] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.086141] env[68285]: DEBUG oslo_vmware.api [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2891671, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.213516] env[68285]: DEBUG nova.network.neutron [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Updating instance_info_cache with network_info: [{"id": "ee14be75-4848-4471-9d06-29e7a06446fd", "address": "fa:16:3e:16:c8:ee", "network": {"id": "dab36320-0163-4a17-8e23-ccb4a6db67a2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-677118867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "31396f8bc32b48e883ef6bd7c38ad3c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee14be75-48", "ovs_interfaceid": "ee14be75-4848-4471-9d06-29e7a06446fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.411788] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b2eaa0-a901-1f31-fa54-4c0f66e1e502, 'name': SearchDatastore_Task, 'duration_secs': 0.009312} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.415022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.415022] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] f1b8808d-c3a1-4be6-b6ec-ed441291e8f2/f1b8808d-c3a1-4be6-b6ec-ed441291e8f2.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 977.415022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.415022] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 977.415022] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d73f2d5-4e4b-4996-9451-76a3c6f826bd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.415381] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43f263a0-2f42-4662-8cf9-44d749dda9ed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.420622] env[68285]: DEBUG nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 977.428015] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 977.428210] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 977.429071] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 977.429071] env[68285]: value = "task-2891672" [ 977.429071] env[68285]: _type = "Task" [ 977.429071] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.429737] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a28c11af-206b-439c-8596-117ebe3c224f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.441843] env[68285]: DEBUG nova.network.neutron [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Successfully created port: d542d712-22ed-45d7-bf6e-ce3ae5cf5556 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 977.450925] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891668, 'name': Destroy_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.454570] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.454899] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 977.454899] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]526cb423-be40-a80e-6160-4f74f18e9707" [ 977.454899] env[68285]: _type = "Task" [ 977.454899] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.470367] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]526cb423-be40-a80e-6160-4f74f18e9707, 'name': SearchDatastore_Task, 'duration_secs': 0.019059} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.471117] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8ebc59b-fff1-4639-8704-cca45ea80823 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.478466] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 977.478466] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52755901-a2b9-f067-bbc9-74d00cdfd665" [ 977.478466] env[68285]: _type = "Task" [ 977.478466] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.486355] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52755901-a2b9-f067-bbc9-74d00cdfd665, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.502806] env[68285]: DEBUG nova.objects.instance [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lazy-loading 'flavor' on Instance uuid d4f20336-9c29-4aac-8c0d-f577749cd7d7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.586981] env[68285]: DEBUG oslo_vmware.api [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Task: {'id': task-2891671, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130245} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.591016] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 977.591016] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 977.591016] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 977.591016] env[68285]: INFO nova.compute.manager [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 977.591016] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 977.591796] env[68285]: DEBUG nova.compute.manager [-] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 977.591999] env[68285]: DEBUG nova.network.neutron [-] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 977.719685] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Releasing lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.954029] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891672, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.958358] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891668, 'name': Destroy_Task, 'duration_secs': 1.339437} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.959313] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Destroyed the VM [ 977.959671] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 977.960329] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1d9bac3a-dd2f-4443-a29f-9712527aaa90 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.969080] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 977.969080] env[68285]: value = "task-2891673" [ 977.969080] env[68285]: _type = "Task" [ 977.969080] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.983910] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891673, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.994684] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52755901-a2b9-f067-bbc9-74d00cdfd665, 'name': SearchDatastore_Task, 'duration_secs': 0.009476} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.996598] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.997050] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 8a848ec8-1ae0-4437-be4f-49219214d11f/8a848ec8-1ae0-4437-be4f-49219214d11f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 977.998701] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f763bb15-3544-403f-94c9-b5b3b5a4ba0a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.002528] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-287786d6-5ac1-4441-a0b0-f69d7eec958c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.009200] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dde7dfc8-efc7-49b4-b7c0-a9d8fe70bb35 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.307s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.018043] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6cc3962-4b0a-419d-9d9c-881d699b8c72 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.022152] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 978.022152] env[68285]: value = "task-2891674" [ 978.022152] env[68285]: _type = "Task" [ 978.022152] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.061821] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e522f5b-ddb2-4e44-bf25-515253848acb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.068902] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891674, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.075660] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07c4ebf-ddd0-4adb-9c99-fcde5cf0f944 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.093845] env[68285]: DEBUG nova.compute.provider_tree [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 978.444387] env[68285]: DEBUG nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 978.456311] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891672, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.703425} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.456311] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] f1b8808d-c3a1-4be6-b6ec-ed441291e8f2/f1b8808d-c3a1-4be6-b6ec-ed441291e8f2.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 978.456311] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 978.456311] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8dc7224-8a50-4b58-a27d-1ef21891e380 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.469323] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 978.469323] env[68285]: value = "task-2891675" [ 978.469323] env[68285]: _type = "Task" [ 978.469323] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.476771] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 978.477468] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 978.477733] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 978.478012] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 
tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 978.478221] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 978.478850] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 978.479185] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 978.479423] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 978.479696] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 978.479953] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 978.483823] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 978.484854] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48d7e6b-ff55-488e-9d06-9a6cb8d5e28d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.500974] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1ff1ff-8fe2-4f4a-aa18-54e252432cfe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.506024] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891673, 'name': RemoveSnapshot_Task} progress is 47%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.506396] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891675, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.509023] env[68285]: DEBUG nova.compute.manager [req-15ae8f98-00eb-4ceb-a67d-1928b2881cc8 req-0d229510-deab-4b34-9b70-661cb73b5dda service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Received event network-vif-deleted-56efc1e7-b396-4ba4-8104-803f5f018f35 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 978.510069] env[68285]: INFO nova.compute.manager [req-15ae8f98-00eb-4ceb-a67d-1928b2881cc8 req-0d229510-deab-4b34-9b70-661cb73b5dda service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Neutron deleted interface 56efc1e7-b396-4ba4-8104-803f5f018f35; detaching it from the instance and deleting it from the info cache [ 978.510069] env[68285]: DEBUG nova.network.neutron [req-15ae8f98-00eb-4ceb-a67d-1928b2881cc8 req-0d229510-deab-4b34-9b70-661cb73b5dda service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.539117] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891674, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.617997] env[68285]: ERROR nova.scheduler.client.report [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [req-624ba51f-5b03-4b3f-8c0c-eddc8c8f9bf0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-624ba51f-5b03-4b3f-8c0c-eddc8c8f9bf0"}]} [ 978.631989] env[68285]: DEBUG nova.network.neutron [-] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.639228] env[68285]: DEBUG nova.scheduler.client.report [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 978.658033] env[68285]: DEBUG nova.scheduler.client.report [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 978.658285] env[68285]: DEBUG nova.compute.provider_tree [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 978.674619] env[68285]: DEBUG nova.scheduler.client.report [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 978.700140] env[68285]: DEBUG nova.scheduler.client.report [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 978.983304] env[68285]: DEBUG oslo_vmware.api [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 
tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891673, 'name': RemoveSnapshot_Task, 'duration_secs': 0.799412} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.983578] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 978.983806] env[68285]: INFO nova.compute.manager [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Took 17.78 seconds to snapshot the instance on the hypervisor. [ 978.992713] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891675, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.221288} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.992956] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 978.993725] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3368891-c1de-4467-9781-de27b5ab6d16 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.016995] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] f1b8808d-c3a1-4be6-b6ec-ed441291e8f2/f1b8808d-c3a1-4be6-b6ec-ed441291e8f2.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 979.019149] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7a7507b-d34e-41ee-af09-d8d59589792f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.036654] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa00cd5f-dbc1-4024-9b19-449ae0d3f08b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.052921] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891674, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552329} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.053015] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 979.053015] env[68285]: value = "task-2891676" [ 979.053015] env[68285]: _type = "Task" [ 979.053015] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.055843] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593bb92b-8273-46be-84ea-6745121fec8b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.066713] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 8a848ec8-1ae0-4437-be4f-49219214d11f/8a848ec8-1ae0-4437-be4f-49219214d11f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 979.068221] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 979.070308] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ae8ea5e-1cf4-473e-aadb-e0ffd5cf83b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.080654] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 979.080654] env[68285]: value = "task-2891677" [ 979.080654] env[68285]: _type = "Task" [ 979.080654] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.085907] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891676, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.104932] env[68285]: DEBUG nova.compute.manager [req-15ae8f98-00eb-4ceb-a67d-1928b2881cc8 req-0d229510-deab-4b34-9b70-661cb73b5dda service nova] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Detach interface failed, port_id=56efc1e7-b396-4ba4-8104-803f5f018f35, reason: Instance e28d0927-17c2-4256-93d4-ef0cc2c9b92a could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 979.113294] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891677, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.137369] env[68285]: INFO nova.compute.manager [-] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Took 1.54 seconds to deallocate network for instance. [ 979.247434] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba89849-69c4-4b3d-92d9-6cdb0b072c13 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.257339] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822f6038-8732-406f-a597-c440afde54c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.260577] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.260859] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79c648e7-e6f7-419f-be0e-94cba551918c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.291587] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6398d4ca-6cc9-4fc8-96e5-623fc5453fb3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.294512] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 979.294512] env[68285]: value = "task-2891678" [ 979.294512] env[68285]: _type = "Task" [ 979.294512] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.302007] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259aa36b-e8de-4cdc-a1af-abc58ff6428e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.309191] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891678, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.319897] env[68285]: DEBUG nova.compute.provider_tree [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 979.537588] env[68285]: DEBUG nova.compute.manager [None req-e081ebda-3fc6-4363-99a8-189f1f0c691c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Found 2 images (rotation: 2) {{(pid=68285) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 979.545077] env[68285]: DEBUG nova.compute.manager [req-e379859e-fd55-488c-9f82-9eeed0f826a6 req-1041dfc3-8e84-4af9-82d5-3943fa7c252c service nova] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Received event network-vif-plugged-d542d712-22ed-45d7-bf6e-ce3ae5cf5556 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 979.545077] env[68285]: DEBUG oslo_concurrency.lockutils [req-e379859e-fd55-488c-9f82-9eeed0f826a6 req-1041dfc3-8e84-4af9-82d5-3943fa7c252c service nova] Acquiring lock "32d23c62-23ec-4732-a95d-6ac32805e1b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.545077] env[68285]: DEBUG oslo_concurrency.lockutils [req-e379859e-fd55-488c-9f82-9eeed0f826a6 req-1041dfc3-8e84-4af9-82d5-3943fa7c252c service nova] Lock "32d23c62-23ec-4732-a95d-6ac32805e1b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.545077] env[68285]: DEBUG oslo_concurrency.lockutils [req-e379859e-fd55-488c-9f82-9eeed0f826a6 req-1041dfc3-8e84-4af9-82d5-3943fa7c252c service nova] Lock "32d23c62-23ec-4732-a95d-6ac32805e1b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.545077] env[68285]: DEBUG nova.compute.manager [req-e379859e-fd55-488c-9f82-9eeed0f826a6 req-1041dfc3-8e84-4af9-82d5-3943fa7c252c service nova] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] No waiting events found dispatching network-vif-plugged-d542d712-22ed-45d7-bf6e-ce3ae5cf5556 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 979.545077] env[68285]: WARNING nova.compute.manager [req-e379859e-fd55-488c-9f82-9eeed0f826a6 req-1041dfc3-8e84-4af9-82d5-3943fa7c252c service nova] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Received unexpected event network-vif-plugged-d542d712-22ed-45d7-bf6e-ce3ae5cf5556 for instance with vm_state building and task_state spawning. 
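The placement traffic recorded above (the ERROR "Failed to update inventory ... Got 409 ... placement.concurrent_update", the subsequent "Refreshing inventories", "Refreshing aggregate associations" and "Refreshing trait associations" entries, and the renewed ProviderTree update) is the report client losing a resource-provider generation race and retrying. A minimal sketch of that generation-guarded update loop, written against the Placement HTTP API with a plain requests client; the endpoint, token, microversion header value and function name below are illustrative placeholders, not Nova's own code:

    import requests

    PLACEMENT = "http://placement.example.test"           # placeholder endpoint
    HEADERS = {
        "x-auth-token": "ADMIN_TOKEN",                     # placeholder auth token
        "openstack-api-version": "placement 1.26",         # assumed microversion
    }

    def set_inventory(rp_uuid, inventories, max_retries=4):
        """PUT inventory guarded by the resource-provider generation.

        Placement rejects the write with HTTP 409 and error code
        "placement.concurrent_update" when the generation sent is stale;
        the caller re-reads the provider to pick up the new generation and
        tries again, which is the refresh/retry cycle visible in the log.
        """
        url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
        for _ in range(max_retries):
            current = requests.get(url, headers=HEADERS).json()
            payload = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=payload, headers=HEADERS)
            if resp.status_code == 200:
                return resp.json()              # success, generation bumped
            if resp.status_code != 409:
                resp.raise_for_status()         # unrelated failure
            # 409 concurrent_update: another writer won the race; refresh and retry.
        raise RuntimeError("gave up after repeated generation conflicts")

    # Inventory shaped like the VCPU record in the log above.
    VCPU = {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
            "step_size": 1, "allocation_ratio": 4.0}
    # set_inventory("7bdf675d-15ae-4a4b-9c03-79d8c773b76b", {"VCPU": VCPU})

Nova's own SchedulerReportClient handles this internally: after the 409 it refreshes the cached inventories, aggregates and traits for the provider and re-submits, which is why the log later shows the update succeeding and the provider generation moving from 80 to 81.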
[ 979.579496] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891676, 'name': ReconfigVM_Task, 'duration_secs': 0.338691} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.579764] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Reconfigured VM instance instance-00000039 to attach disk [datastore2] f1b8808d-c3a1-4be6-b6ec-ed441291e8f2/f1b8808d-c3a1-4be6-b6ec-ed441291e8f2.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 979.580418] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aefb321d-a406-461d-a408-1c3db052fd76 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.589186] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 979.589186] env[68285]: value = "task-2891679" [ 979.589186] env[68285]: _type = "Task" [ 979.589186] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.596013] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891677, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067844} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.596563] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 979.597313] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412e2553-7073-4d7d-82c6-cbfb0dea43ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.602701] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891679, 'name': Rename_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.625529] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 8a848ec8-1ae0-4437-be4f-49219214d11f/8a848ec8-1ae0-4437-be4f-49219214d11f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 979.626441] env[68285]: DEBUG nova.network.neutron [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Successfully updated port: d542d712-22ed-45d7-bf6e-ce3ae5cf5556 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 979.627897] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc43a71e-0be4-4b5d-b53f-712458d18c0b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.643822] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.650348] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 979.650348] env[68285]: value = "task-2891680" [ 979.650348] env[68285]: _type = "Task" [ 979.650348] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.658709] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891680, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.804756] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891678, 'name': PowerOffVM_Task, 'duration_secs': 0.212315} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.805027] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 979.805846] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f30d8a-ad31-430a-8cba-da5fd8079b19 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.828082] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f575428d-a7e3-4f7a-bc77-5a3b920ad05b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.847385] env[68285]: ERROR nova.scheduler.client.report [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [req-1517215c-86ce-43a2-93b9-fe4aa58b8166] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1517215c-86ce-43a2-93b9-fe4aa58b8166"}]} [ 979.860467] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.861921] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9ff950a-576e-4f02-bfc2-a45babf28c1b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.862947] env[68285]: DEBUG nova.scheduler.client.report [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 979.871100] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 979.871100] env[68285]: value = "task-2891681" [ 979.871100] env[68285]: _type = "Task" [ 979.871100] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.897742] env[68285]: DEBUG nova.scheduler.client.report [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 979.897742] env[68285]: DEBUG nova.compute.provider_tree [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 979.897742] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891681, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.906692] env[68285]: DEBUG nova.scheduler.client.report [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 979.923360] env[68285]: DEBUG nova.scheduler.client.report [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 979.970479] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.970479] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.102163] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891679, 'name': Rename_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.143944] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "refresh_cache-32d23c62-23ec-4732-a95d-6ac32805e1b9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.144118] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired lock "refresh_cache-32d23c62-23ec-4732-a95d-6ac32805e1b9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.144251] env[68285]: DEBUG nova.network.neutron [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 980.164684] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891680, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.334286] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e280a46-ec98-4584-a536-63fccbadbc1e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.343132] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89793e0-ef8f-4adf-b411-a4554e066b00 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.380249] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffa7764-6467-4f32-bcef-08d187a8b6d8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.389295] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 980.389504] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 980.389746] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.389889] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.390077] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 980.390362] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-006348a5-7ae1-4dc3-aca9-578fbad7812c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.392857] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14674a1-2d51-44a5-953e-b9ebafd14c60 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.408545] env[68285]: DEBUG nova.compute.provider_tree [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 980.411024] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 980.411024] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 980.412112] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d57fc02-1545-4fa2-a816-1ced1085269e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.418150] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 980.418150] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f035f5-b79f-a4ad-59ee-22e17c61b041" [ 980.418150] env[68285]: _type = "Task" [ 980.418150] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.428971] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f035f5-b79f-a4ad-59ee-22e17c61b041, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.472997] env[68285]: DEBUG nova.compute.manager [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 980.600789] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891679, 'name': Rename_Task, 'duration_secs': 0.879139} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.601089] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 980.601331] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68a7ee20-ccb8-4043-87fb-814826d23990 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.607281] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 980.607281] env[68285]: value = "task-2891682" [ 980.607281] env[68285]: _type = "Task" [ 980.607281] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.614774] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891682, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.662746] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891680, 'name': ReconfigVM_Task, 'duration_secs': 0.961964} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.663045] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 8a848ec8-1ae0-4437-be4f-49219214d11f/8a848ec8-1ae0-4437-be4f-49219214d11f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 980.663732] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d958959-0aaa-4ccb-a9f2-8bd8a1226f42 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.670928] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 980.670928] env[68285]: value = "task-2891683" [ 980.670928] env[68285]: _type = "Task" [ 980.670928] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.678712] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891683, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.698304] env[68285]: DEBUG nova.network.neutron [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 980.828131] env[68285]: DEBUG nova.compute.manager [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 980.829154] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b3ac1b-b85b-4c9a-a8a7-8c7bd3651a30 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.918263] env[68285]: DEBUG nova.network.neutron [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Updating instance_info_cache with network_info: [{"id": "d542d712-22ed-45d7-bf6e-ce3ae5cf5556", "address": "fa:16:3e:8c:05:d0", "network": {"id": "231b12a1-4634-4ad8-914a-4a74de128049", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-548125967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b390e5b4080a4984a3f935e9e6a0dd2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd542d712-22", "ovs_interfaceid": "d542d712-22ed-45d7-bf6e-ce3ae5cf5556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.929618] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f035f5-b79f-a4ad-59ee-22e17c61b041, 'name': SearchDatastore_Task, 'duration_secs': 0.009919} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.929956] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7725d65e-5b63-43a3-8e5e-4bcf5dcbe884 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.935811] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 980.935811] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521760be-b3a0-82d6-6555-0112c1b2b101" [ 980.935811] env[68285]: _type = "Task" [ 980.935811] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.951382] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521760be-b3a0-82d6-6555-0112c1b2b101, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.955063] env[68285]: DEBUG nova.scheduler.client.report [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 80 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 980.955320] env[68285]: DEBUG nova.compute.provider_tree [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 80 to 81 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 980.955495] env[68285]: DEBUG nova.compute.provider_tree [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 980.994316] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.116246] env[68285]: DEBUG oslo_vmware.api [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891682, 'name': PowerOnVM_Task, 'duration_secs': 0.471991} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.116519] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 981.116722] env[68285]: INFO nova.compute.manager [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Took 8.63 seconds to spawn the instance on the hypervisor. [ 981.116907] env[68285]: DEBUG nova.compute.manager [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 981.117709] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba03020d-3d59-4b29-b693-5cc2a05eb870 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.179848] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891683, 'name': Rename_Task, 'duration_secs': 0.155937} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.180133] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 981.180375] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1542182b-f554-49ce-b66d-140de79593d6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.186156] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 981.186156] env[68285]: value = "task-2891684" [ 981.186156] env[68285]: _type = "Task" [ 981.186156] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.194741] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891684, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.340558] env[68285]: INFO nova.compute.manager [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] instance snapshotting [ 981.341194] env[68285]: DEBUG nova.objects.instance [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'flavor' on Instance uuid 34aeba05-804e-444c-8e58-69c7721b10b1 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.424354] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Releasing lock "refresh_cache-32d23c62-23ec-4732-a95d-6ac32805e1b9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.424736] env[68285]: DEBUG nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Instance network_info: |[{"id": "d542d712-22ed-45d7-bf6e-ce3ae5cf5556", "address": "fa:16:3e:8c:05:d0", "network": {"id": "231b12a1-4634-4ad8-914a-4a74de128049", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-548125967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b390e5b4080a4984a3f935e9e6a0dd2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd542d712-22", "ovs_interfaceid": "d542d712-22ed-45d7-bf6e-ce3ae5cf5556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 981.425172] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:05:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f016d1-34a6-4ebd-81ed-a6bf9d109b87', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd542d712-22ed-45d7-bf6e-ce3ae5cf5556', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 981.432890] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Creating folder: Project 
(b390e5b4080a4984a3f935e9e6a0dd2a). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 981.433170] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fab7cb7f-12ff-4723-a19e-cceb8ef72a78 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.444721] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521760be-b3a0-82d6-6555-0112c1b2b101, 'name': SearchDatastore_Task, 'duration_secs': 0.013851} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.445983] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.446276] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 7bef3e2a-00ab-480a-aa8c-335635ee5d31/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk. {{(pid=68285) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 981.446543] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Created folder: Project (b390e5b4080a4984a3f935e9e6a0dd2a) in parent group-v580775. [ 981.446730] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Creating folder: Instances. Parent ref: group-v580940. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 981.447103] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d5b6e21-32b6-480d-99c8-2ab3f33c3626 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.448954] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8cf9eeb6-93aa-48d5-a84c-8bd10fa841e1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.454938] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 981.454938] env[68285]: value = "task-2891686" [ 981.454938] env[68285]: _type = "Task" [ 981.454938] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.458958] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Created folder: Instances in parent group-v580940. [ 981.459192] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 981.459661] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 981.460927] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.044s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.460927] env[68285]: DEBUG nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 981.465923] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c9b7b1ce-7d37-4ccb-a12f-4183f42161b0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.480406] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.462s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.480674] env[68285]: DEBUG nova.objects.instance [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Lazy-loading 'resources' on Instance uuid c7ab28c3-a316-4685-b876-a0e7c657ec35 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.482048] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891686, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.487761] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 981.487761] env[68285]: value = "task-2891688" [ 981.487761] env[68285]: _type = "Task" [ 981.487761] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.496489] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891688, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.593215] env[68285]: DEBUG nova.compute.manager [req-75520d95-a26c-4de5-8f83-e0cb0a2db3b3 req-9985a3f2-2b9c-4849-844a-b0dbc1eb6d06 service nova] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Received event network-changed-d542d712-22ed-45d7-bf6e-ce3ae5cf5556 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 981.593431] env[68285]: DEBUG nova.compute.manager [req-75520d95-a26c-4de5-8f83-e0cb0a2db3b3 req-9985a3f2-2b9c-4849-844a-b0dbc1eb6d06 service nova] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Refreshing instance network info cache due to event network-changed-d542d712-22ed-45d7-bf6e-ce3ae5cf5556. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 981.593507] env[68285]: DEBUG oslo_concurrency.lockutils [req-75520d95-a26c-4de5-8f83-e0cb0a2db3b3 req-9985a3f2-2b9c-4849-844a-b0dbc1eb6d06 service nova] Acquiring lock "refresh_cache-32d23c62-23ec-4732-a95d-6ac32805e1b9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.593644] env[68285]: DEBUG oslo_concurrency.lockutils [req-75520d95-a26c-4de5-8f83-e0cb0a2db3b3 req-9985a3f2-2b9c-4849-844a-b0dbc1eb6d06 service nova] Acquired lock "refresh_cache-32d23c62-23ec-4732-a95d-6ac32805e1b9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.593800] env[68285]: DEBUG nova.network.neutron [req-75520d95-a26c-4de5-8f83-e0cb0a2db3b3 req-9985a3f2-2b9c-4849-844a-b0dbc1eb6d06 service nova] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Refreshing network info cache for port d542d712-22ed-45d7-bf6e-ce3ae5cf5556 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 981.639311] env[68285]: INFO nova.compute.manager [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Took 46.36 seconds to build instance. [ 981.697014] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891684, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.846396] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec878074-293c-4ae9-acde-0f1e492932ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.867100] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb8c920-8bd8-4de7-8fb4-a8d16d1680ca {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.964558] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891686, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.984526] env[68285]: DEBUG nova.compute.utils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 981.988773] env[68285]: DEBUG nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 981.988955] env[68285]: DEBUG nova.network.neutron [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 982.000773] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891688, 'name': CreateVM_Task, 'duration_secs': 0.356571} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.000956] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 982.001617] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.002138] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.002138] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 982.002337] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a831830-e2bd-4712-810d-d62cb1142c32 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.007027] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 982.007027] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52dced36-47b7-12a9-905d-ae64a2903508" [ 982.007027] env[68285]: _type = "Task" [ 982.007027] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.017770] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52dced36-47b7-12a9-905d-ae64a2903508, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.055620] env[68285]: DEBUG nova.policy [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '48401b0b09a2477db2a87df4835c70a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b390e5b4080a4984a3f935e9e6a0dd2a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 982.142412] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d27f2949-1200-4b0c-aff3-a2a4245a7cd7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "f1b8808d-c3a1-4be6-b6ec-ed441291e8f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.871s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.198662] env[68285]: DEBUG oslo_vmware.api [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891684, 'name': PowerOnVM_Task, 'duration_secs': 0.686035} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.199020] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 982.199224] env[68285]: DEBUG nova.compute.manager [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 982.200281] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8e7a61-eb66-49b6-b69d-2847d3f3393e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.353382] env[68285]: DEBUG nova.network.neutron [req-75520d95-a26c-4de5-8f83-e0cb0a2db3b3 req-9985a3f2-2b9c-4849-844a-b0dbc1eb6d06 service nova] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Updated VIF entry in instance network info cache for port d542d712-22ed-45d7-bf6e-ce3ae5cf5556. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 982.353818] env[68285]: DEBUG nova.network.neutron [req-75520d95-a26c-4de5-8f83-e0cb0a2db3b3 req-9985a3f2-2b9c-4849-844a-b0dbc1eb6d06 service nova] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Updating instance_info_cache with network_info: [{"id": "d542d712-22ed-45d7-bf6e-ce3ae5cf5556", "address": "fa:16:3e:8c:05:d0", "network": {"id": "231b12a1-4634-4ad8-914a-4a74de128049", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-548125967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b390e5b4080a4984a3f935e9e6a0dd2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd542d712-22", "ovs_interfaceid": "d542d712-22ed-45d7-bf6e-ce3ae5cf5556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.378577] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 982.379040] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-804d8e01-0885-4879-8b5e-87355c2a72bb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.391414] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 982.391414] env[68285]: value = "task-2891689" [ 982.391414] env[68285]: _type = "Task" [ 982.391414] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.401555] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891689, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.467257] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891686, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.489851] env[68285]: DEBUG nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 982.520117] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52dced36-47b7-12a9-905d-ae64a2903508, 'name': SearchDatastore_Task, 'duration_secs': 0.010039} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.523069] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.524476] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 982.524795] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.525009] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.525281] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.526467] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d2f6154-f7ed-476a-8902-d94d3eb2cc4b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.536301] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7482c860-b855-4d14-bb5c-053e71af2659 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.543837] env[68285]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d791747-bc6c-4611-8413-0518ccffcd1b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.548071] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 982.548314] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 982.549349] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30b491d9-0c1c-403c-b98a-892c3ab8d9e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.579740] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0dbf4df-0179-417b-879d-fc7988c1df78 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.583705] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 982.583705] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523215d1-683e-bfcb-c135-b3ad36c1ca1f" [ 982.583705] env[68285]: _type = "Task" [ 982.583705] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.590199] env[68285]: DEBUG nova.network.neutron [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Successfully created port: 08074d01-1b01-4ca8-a5ca-f427c8ec414e {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 982.593612] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0415959-4469-4453-a293-3305210f9a22 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.601352] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523215d1-683e-bfcb-c135-b3ad36c1ca1f, 'name': SearchDatastore_Task, 'duration_secs': 0.008182} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.602690] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41dbfbf1-651c-444b-82a6-52880680aa8d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.613170] env[68285]: DEBUG nova.compute.provider_tree [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 982.617298] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 982.617298] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52480fbb-7821-25e7-107b-ef071ae8ee84" [ 982.617298] env[68285]: _type = "Task" [ 982.617298] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.625256] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52480fbb-7821-25e7-107b-ef071ae8ee84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.721512] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.862753] env[68285]: DEBUG oslo_concurrency.lockutils [req-75520d95-a26c-4de5-8f83-e0cb0a2db3b3 req-9985a3f2-2b9c-4849-844a-b0dbc1eb6d06 service nova] Releasing lock "refresh_cache-32d23c62-23ec-4732-a95d-6ac32805e1b9" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.900875] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891689, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.966062] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891686, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.080065} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.966359] env[68285]: INFO nova.virt.vmwareapi.ds_util [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 7bef3e2a-00ab-480a-aa8c-335635ee5d31/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk. [ 982.967135] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889b34f8-aad3-4427-afc1-7e636f5c0e2b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.992967] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 7bef3e2a-00ab-480a-aa8c-335635ee5d31/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 982.993499] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8ee0a7a-eff9-456b-b6ea-f842a332a2e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.020308] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 983.020308] env[68285]: value = "task-2891690" [ 983.020308] env[68285]: _type = "Task" [ 983.020308] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.029474] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891690, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.128318] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52480fbb-7821-25e7-107b-ef071ae8ee84, 'name': SearchDatastore_Task, 'duration_secs': 0.008612} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.128579] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.128851] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 32d23c62-23ec-4732-a95d-6ac32805e1b9/32d23c62-23ec-4732-a95d-6ac32805e1b9.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 983.129145] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80420e6c-c098-4e93-b6be-05d7a6e2ce41 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.136131] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 983.136131] env[68285]: value = "task-2891691" [ 983.136131] env[68285]: _type = "Task" [ 983.136131] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.143865] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891691, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.147565] env[68285]: DEBUG nova.scheduler.client.report [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 81 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 983.147807] env[68285]: DEBUG nova.compute.provider_tree [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 81 to 82 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 983.148014] env[68285]: DEBUG nova.compute.provider_tree [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 983.402956] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891689, 'name': CreateSnapshot_Task, 'duration_secs': 0.560121} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.403320] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 983.404309] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41757e0d-81d2-4883-8598-f0929d8baeff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.514873] env[68285]: DEBUG nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 983.520077] env[68285]: INFO nova.compute.manager [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Rebuilding instance [ 983.535641] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891690, 'name': ReconfigVM_Task, 'duration_secs': 0.297941} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.538095] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 7bef3e2a-00ab-480a-aa8c-335635ee5d31/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 983.539401] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a42a9da-f5ed-4f2e-ad5d-9dcd3cc51324 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.546327] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 983.546583] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 983.546742] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 983.546926] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 983.547096] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 983.547249] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 983.547458] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 983.547617] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 983.547781] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 983.547973] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 983.548179] env[68285]: DEBUG nova.virt.hardware [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 983.549046] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cc1910-8e32-43b8-9b0c-60f25723754e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.577255] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2691c128-7c6f-424b-aafb-801ca4ddf861 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.596492] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d1556a-7d7d-47f8-abe5-8904e82509a3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.602653] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 983.602653] 
env[68285]: value = "task-2891692" [ 983.602653] env[68285]: _type = "Task" [ 983.602653] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.623843] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891692, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.626691] env[68285]: DEBUG nova.compute.manager [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 983.627688] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230d4a3c-c734-4b55-8f0d-01cc1363581a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.647695] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891691, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499043} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.647695] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 32d23c62-23ec-4732-a95d-6ac32805e1b9/32d23c62-23ec-4732-a95d-6ac32805e1b9.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 983.647695] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 983.648537] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7a8e997-d7f2-4f13-ad45-5fce1fcae19f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.653408] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.173s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.656625] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.299s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.657558] env[68285]: DEBUG nova.objects.instance [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lazy-loading 'resources' on Instance uuid 5266817c-ce3b-4c96-a3bd-32b631c29b81 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.661504] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 983.661504] env[68285]: value = "task-2891693" [ 983.661504] env[68285]: _type = "Task" [ 983.661504] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.668295] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891693, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.677987] env[68285]: INFO nova.scheduler.client.report [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Deleted allocations for instance c7ab28c3-a316-4685-b876-a0e7c657ec35 [ 983.719577] env[68285]: INFO nova.compute.manager [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Rebuilding instance [ 983.763712] env[68285]: DEBUG nova.compute.manager [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 983.764615] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71a013f-4713-4991-972f-2982df1953af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.923780] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 983.923780] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3fc72dc0-fbee-484c-97f8-4acf4fed6f91 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.933225] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 983.933225] env[68285]: value = "task-2891694" [ 983.933225] env[68285]: _type = "Task" [ 983.933225] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.941293] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891694, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.114135] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891692, 'name': ReconfigVM_Task, 'duration_secs': 0.175302} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.114511] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 984.114853] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d9e0ee8-7ed5-404a-bae7-f93ae6921615 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.121578] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 984.121578] env[68285]: value = "task-2891695" [ 984.121578] env[68285]: _type = "Task" [ 984.121578] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.130946] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891695, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.175342] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891693, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069982} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.175954] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 984.179256] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73889d6-51c1-4ffd-a4b9-ef63c8d3892f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.187842] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebf31979-9253-4cdb-b25d-649434722c34 tempest-ServersAaction247Test-481979879 tempest-ServersAaction247Test-481979879-project-member] Lock "c7ab28c3-a316-4685-b876-a0e7c657ec35" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.463s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.214219] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 32d23c62-23ec-4732-a95d-6ac32805e1b9/32d23c62-23ec-4732-a95d-6ac32805e1b9.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 984.218315] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51a0c803-0a22-4084-b749-ebbb24ee2820 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.239158] env[68285]: DEBUG nova.compute.manager [req-3f208a4e-a8f2-4cfc-81f6-fcb01f224cf8 req-fba90229-ab2e-481f-8bf7-d5eb8c0e977e service nova] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Received event network-vif-plugged-08074d01-1b01-4ca8-a5ca-f427c8ec414e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 984.239158] env[68285]: DEBUG oslo_concurrency.lockutils [req-3f208a4e-a8f2-4cfc-81f6-fcb01f224cf8 req-fba90229-ab2e-481f-8bf7-d5eb8c0e977e service nova] Acquiring lock "9569d50c-d358-4cc5-a106-32da785e4765-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.239158] env[68285]: DEBUG oslo_concurrency.lockutils [req-3f208a4e-a8f2-4cfc-81f6-fcb01f224cf8 req-fba90229-ab2e-481f-8bf7-d5eb8c0e977e service nova] Lock "9569d50c-d358-4cc5-a106-32da785e4765-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 984.239707] env[68285]: DEBUG oslo_concurrency.lockutils [req-3f208a4e-a8f2-4cfc-81f6-fcb01f224cf8 req-fba90229-ab2e-481f-8bf7-d5eb8c0e977e service nova] Lock "9569d50c-d358-4cc5-a106-32da785e4765-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.239707] env[68285]: DEBUG nova.compute.manager [req-3f208a4e-a8f2-4cfc-81f6-fcb01f224cf8 req-fba90229-ab2e-481f-8bf7-d5eb8c0e977e service nova] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] No waiting events found dispatching network-vif-plugged-08074d01-1b01-4ca8-a5ca-f427c8ec414e {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 984.239844] env[68285]: WARNING nova.compute.manager [req-3f208a4e-a8f2-4cfc-81f6-fcb01f224cf8 req-fba90229-ab2e-481f-8bf7-d5eb8c0e977e service nova] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Received unexpected event network-vif-plugged-08074d01-1b01-4ca8-a5ca-f427c8ec414e for instance with vm_state building and task_state spawning. [ 984.244212] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 984.244212] env[68285]: value = "task-2891696" [ 984.244212] env[68285]: _type = "Task" [ 984.244212] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.257713] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891696, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.376908] env[68285]: DEBUG nova.network.neutron [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Successfully updated port: 08074d01-1b01-4ca8-a5ca-f427c8ec414e {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 984.443913] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891694, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.635122] env[68285]: DEBUG oslo_vmware.api [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891695, 'name': PowerOnVM_Task, 'duration_secs': 0.433248} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.635122] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 984.638668] env[68285]: DEBUG nova.compute.manager [None req-7843081e-46a4-4698-b3d1-96cacdefb2c7 tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 984.639814] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7400ac55-2b7a-40f6-a5a8-2b4c301f6407 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.643595] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 984.643809] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28be9c55-5f65-4c92-9e27-6cc33b3caf77 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.660591] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 984.660591] env[68285]: value = "task-2891697" [ 984.660591] env[68285]: _type = "Task" [ 984.660591] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.674144] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891697, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.740564] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed05e6e-6f06-4e38-9666-3282f14acd67 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.754491] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77bb530f-b16c-4684-a177-a104fa97854f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.757830] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891696, 'name': ReconfigVM_Task, 'duration_secs': 0.459203} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.758193] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 32d23c62-23ec-4732-a95d-6ac32805e1b9/32d23c62-23ec-4732-a95d-6ac32805e1b9.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 984.759232] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-136e7528-860d-477b-904a-69f89436e8ef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.789285] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 984.789645] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-358a523d-e771-41e9-beb1-b0c802d0dddc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.792071] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d872b9-2a25-4401-9da2-7070af7a79ae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.795968] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 984.795968] env[68285]: value = "task-2891698" [ 984.795968] env[68285]: _type = "Task" [ 984.795968] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.804989] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Waiting for the task: (returnval){ [ 984.804989] env[68285]: value = "task-2891699" [ 984.804989] env[68285]: _type = "Task" [ 984.804989] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.806736] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb0083a-5295-402c-9b93-3235f5792317 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.819102] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891698, 'name': Rename_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.830082] env[68285]: DEBUG nova.compute.provider_tree [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.835602] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891699, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.880696] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "refresh_cache-9569d50c-d358-4cc5-a106-32da785e4765" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.880696] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired lock "refresh_cache-9569d50c-d358-4cc5-a106-32da785e4765" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 984.880696] env[68285]: DEBUG nova.network.neutron [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 984.943452] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891694, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.172284] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891697, 'name': PowerOffVM_Task, 'duration_secs': 0.308336} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.172819] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 985.173260] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 985.175098] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982abf48-10a9-479c-8373-9c55d59c7c79 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.182135] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 985.182548] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c29e9704-a82a-42ef-ae46-e4cc3a9d8f29 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.272416] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 985.272707] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 985.272955] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleting the datastore file [datastore2] f1b8808d-c3a1-4be6-b6ec-ed441291e8f2 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 985.273297] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-810e53e5-5ea9-4ee1-882c-a5b4fe762c03 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.281141] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 985.281141] env[68285]: value = "task-2891701" [ 985.281141] env[68285]: _type = "Task" [ 985.281141] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.288855] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891701, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.304893] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891698, 'name': Rename_Task, 'duration_secs': 0.239302} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.305159] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 985.305393] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b24b2e5-f8f9-47c3-889d-b1630ab3f0b7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.314787] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891699, 'name': PowerOffVM_Task, 'duration_secs': 0.173616} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.315932] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 985.316216] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 985.316520] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 985.316520] env[68285]: value = "task-2891702" [ 985.316520] env[68285]: _type = "Task" [ 985.316520] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.317184] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e1192d-c9f3-4980-8f0b-f34da63a13a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.327030] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891702, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.328976] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 985.329211] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1819afbf-6b03-4b1f-85b9-09b119f80b6b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.337554] env[68285]: DEBUG nova.scheduler.client.report [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 985.353540] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 985.353540] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 985.353540] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Deleting the datastore file [datastore2] 8a848ec8-1ae0-4437-be4f-49219214d11f {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 985.353816] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80ac1105-5dca-4557-b3ce-25795c30a0b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.360021] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d 
tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Waiting for the task: (returnval){ [ 985.360021] env[68285]: value = "task-2891704" [ 985.360021] env[68285]: _type = "Task" [ 985.360021] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.369072] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891704, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.425306] env[68285]: DEBUG nova.network.neutron [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 985.443753] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891694, 'name': CloneVM_Task, 'duration_secs': 1.384365} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.444052] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Created linked-clone VM from snapshot [ 985.444825] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9014e80-9f13-4c82-b71b-0e52ac7e24f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.452418] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Uploading image 97ca943a-cd9b-44f0-83a3-f7bd725d6e49 {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 985.489330] env[68285]: DEBUG oslo_vmware.rw_handles [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 985.489330] env[68285]: value = "vm-580944" [ 985.489330] env[68285]: _type = "VirtualMachine" [ 985.489330] env[68285]: }. 
{{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 985.489549] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2b5ddd04-79a2-419b-9793-f9b4304b2392 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.500782] env[68285]: DEBUG oslo_vmware.rw_handles [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lease: (returnval){ [ 985.500782] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52122cfc-87ea-305f-5b6f-3e19fea4c932" [ 985.500782] env[68285]: _type = "HttpNfcLease" [ 985.500782] env[68285]: } obtained for exporting VM: (result){ [ 985.500782] env[68285]: value = "vm-580944" [ 985.500782] env[68285]: _type = "VirtualMachine" [ 985.500782] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 985.500782] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the lease: (returnval){ [ 985.500782] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52122cfc-87ea-305f-5b6f-3e19fea4c932" [ 985.500782] env[68285]: _type = "HttpNfcLease" [ 985.500782] env[68285]: } to be ready. {{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 985.510468] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 985.510468] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52122cfc-87ea-305f-5b6f-3e19fea4c932" [ 985.510468] env[68285]: _type = "HttpNfcLease" [ 985.510468] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 985.511122] env[68285]: DEBUG oslo_vmware.rw_handles [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 985.511122] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52122cfc-87ea-305f-5b6f-3e19fea4c932" [ 985.511122] env[68285]: _type = "HttpNfcLease" [ 985.511122] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 985.512162] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d52bc01-b78a-4cf5-9953-b08891069c19 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.520458] env[68285]: DEBUG oslo_vmware.rw_handles [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525bcb28-2a2c-11ba-f4f4-1c1cf6f62151/disk-0.vmdk from lease info. 
{{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 985.520798] env[68285]: DEBUG oslo_vmware.rw_handles [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525bcb28-2a2c-11ba-f4f4-1c1cf6f62151/disk-0.vmdk for reading. {{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 985.610017] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4d49dda5-04c6-4013-8cba-4949c78d420f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.796667] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891701, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204987} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.801355] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 985.803979] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 985.803979] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 985.807485] env[68285]: DEBUG nova.network.neutron [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Updating instance_info_cache with network_info: [{"id": "08074d01-1b01-4ca8-a5ca-f427c8ec414e", "address": "fa:16:3e:b4:2b:1e", "network": {"id": "231b12a1-4634-4ad8-914a-4a74de128049", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-548125967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b390e5b4080a4984a3f935e9e6a0dd2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08074d01-1b", 
"ovs_interfaceid": "08074d01-1b01-4ca8-a5ca-f427c8ec414e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.833203] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891702, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.843085] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.186s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.845587] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.759s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.847225] env[68285]: INFO nova.compute.claims [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 985.868423] env[68285]: INFO nova.scheduler.client.report [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleted allocations for instance 5266817c-ce3b-4c96-a3bd-32b631c29b81 [ 985.873343] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891704, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157443} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.880220] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 985.880220] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 985.880220] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 986.313098] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Releasing lock "refresh_cache-9569d50c-d358-4cc5-a106-32da785e4765" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 986.314508] env[68285]: DEBUG nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Instance network_info: |[{"id": "08074d01-1b01-4ca8-a5ca-f427c8ec414e", "address": "fa:16:3e:b4:2b:1e", "network": {"id": "231b12a1-4634-4ad8-914a-4a74de128049", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-548125967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b390e5b4080a4984a3f935e9e6a0dd2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08074d01-1b", "ovs_interfaceid": "08074d01-1b01-4ca8-a5ca-f427c8ec414e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 986.315399] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:2b:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f016d1-34a6-4ebd-81ed-a6bf9d109b87', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'08074d01-1b01-4ca8-a5ca-f427c8ec414e', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 986.325204] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 986.325742] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 986.331045] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be4ee450-a47f-46e2-9c21-f27ec3baf9b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.361928] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891702, 'name': PowerOnVM_Task, 'duration_secs': 0.805778} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.362897] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 986.362897] env[68285]: INFO nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Took 7.92 seconds to spawn the instance on the hypervisor. [ 986.362897] env[68285]: DEBUG nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 986.363431] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 986.363431] env[68285]: value = "task-2891706" [ 986.363431] env[68285]: _type = "Task" [ 986.363431] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.365653] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ecd2d5-d147-445a-8a0c-30b6d4741b36 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.383977] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891706, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.384317] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a4d673ac-dbd9-4f41-b5b5-65cd183790fc tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "5266817c-ce3b-4c96-a3bd-32b631c29b81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.896s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.551973] env[68285]: DEBUG nova.compute.manager [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Received event network-changed-08074d01-1b01-4ca8-a5ca-f427c8ec414e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 986.552410] env[68285]: DEBUG nova.compute.manager [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Refreshing instance network info cache due to event network-changed-08074d01-1b01-4ca8-a5ca-f427c8ec414e. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 986.555061] env[68285]: DEBUG oslo_concurrency.lockutils [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] Acquiring lock "refresh_cache-9569d50c-d358-4cc5-a106-32da785e4765" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.555061] env[68285]: DEBUG oslo_concurrency.lockutils [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] Acquired lock "refresh_cache-9569d50c-d358-4cc5-a106-32da785e4765" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.555061] env[68285]: DEBUG nova.network.neutron [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Refreshing network info cache for port 08074d01-1b01-4ca8-a5ca-f427c8ec414e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 986.839541] env[68285]: DEBUG nova.virt.hardware [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 986.839897] env[68285]: DEBUG nova.virt.hardware [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 
tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 986.840464] env[68285]: DEBUG nova.virt.hardware [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 986.840464] env[68285]: DEBUG nova.virt.hardware [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 986.841627] env[68285]: DEBUG nova.virt.hardware [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 986.841627] env[68285]: DEBUG nova.virt.hardware [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 986.841627] env[68285]: DEBUG nova.virt.hardware [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 986.841627] env[68285]: DEBUG nova.virt.hardware [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 986.841627] env[68285]: DEBUG nova.virt.hardware [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 986.841958] env[68285]: DEBUG nova.virt.hardware [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 986.842058] env[68285]: DEBUG nova.virt.hardware [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 986.842986] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e46a25-16aa-439f-9ddc-0552fa79b257 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.852224] env[68285]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce247e2a-a1b7-4bae-be18-36b5f623a957 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.872103] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:c1:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '82dbbfe2-640b-433f-a8e9-1566bd40fb34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b7efc8c-8a7b-4401-86cd-f76e8836c2c4', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 986.881079] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 986.885591] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 986.890852] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0589cb7d-e6f5-4fb7-a548-47a74eca402b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.915946] env[68285]: INFO nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Took 49.26 seconds to build instance. [ 986.922679] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891706, 'name': CreateVM_Task, 'duration_secs': 0.510215} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.928302] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 986.928736] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 986.928736] env[68285]: value = "task-2891707" [ 986.928736] env[68285]: _type = "Task" [ 986.928736] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.932740] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.933221] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.933774] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 986.934480] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afbeb1f9-aa0a-44c6-9cd7-e295495f8860 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.951765] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891707, 'name': CreateVM_Task} progress is 15%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.954367] env[68285]: DEBUG nova.virt.hardware [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 986.955136] env[68285]: DEBUG nova.virt.hardware [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 986.955136] env[68285]: DEBUG nova.virt.hardware [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 986.955291] env[68285]: DEBUG nova.virt.hardware [None 
req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 986.955377] env[68285]: DEBUG nova.virt.hardware [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 986.955599] env[68285]: DEBUG nova.virt.hardware [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 986.955939] env[68285]: DEBUG nova.virt.hardware [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 986.956254] env[68285]: DEBUG nova.virt.hardware [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 986.956416] env[68285]: DEBUG nova.virt.hardware [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 986.956598] env[68285]: DEBUG nova.virt.hardware [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 986.956858] env[68285]: DEBUG nova.virt.hardware [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 986.962098] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27798014-fd9f-48c9-84aa-3ca24e24c03a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.965285] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 986.965285] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]520ec8d6-1ad5-1c40-c467-103d93f1324c" [ 986.965285] env[68285]: _type = "Task" [ 986.965285] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.974145] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff3f3f1-27fb-4918-a889-bbcbce394320 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.983785] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520ec8d6-1ad5-1c40-c467-103d93f1324c, 'name': SearchDatastore_Task, 'duration_secs': 0.011695} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.987834] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 986.988655] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 986.988655] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.988885] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.989047] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 986.998366] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ed37e48-d9fd-4105-98bc-57ec51e5a4ed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.000856] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 987.007175] env[68285]: DEBUG 
oslo.service.backend.eventlet.loopingcall [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 987.007650] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 987.008542] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4dfd63b6-120f-4e48-aa68-348fd665a57c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.026178] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.026886] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 987.028156] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31b5b484-4dec-4477-a953-5fb797648f23 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.032277] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 987.032277] env[68285]: value = "task-2891708" [ 987.032277] env[68285]: _type = "Task" [ 987.032277] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.037500] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 987.037500] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527403b5-c1e9-d5b3-4551-b71143522501" [ 987.037500] env[68285]: _type = "Task" [ 987.037500] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.047018] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891708, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.053288] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527403b5-c1e9-d5b3-4551-b71143522501, 'name': SearchDatastore_Task, 'duration_secs': 0.01153} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.054328] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ac9c40e-c669-4fab-9456-f3282ce7c232 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.064534] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 987.064534] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521e48a9-d163-5513-ea99-5a26738d1d40" [ 987.064534] env[68285]: _type = "Task" [ 987.064534] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.076501] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521e48a9-d163-5513-ea99-5a26738d1d40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.427040] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "32d23c62-23ec-4732-a95d-6ac32805e1b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.783s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.443754] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891707, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.458427] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8322031-bb41-408e-8904-503685577f2a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.465781] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c10745e-68f9-4812-a085-cf5e484cc355 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.498522] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816927f0-7e7b-4d33-ac78-a3ee705bc054 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.506914] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47e5d1a-7e51-4703-873d-4bd9f56d4ef1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.521531] env[68285]: DEBUG nova.compute.provider_tree [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.543084] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891708, 'name': CreateVM_Task, 'duration_secs': 0.400871} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.543706] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 987.544290] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.544579] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.544996] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 987.545384] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55cf2976-a2b4-47e7-8e11-80d64a02e4b8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.550703] 
env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Waiting for the task: (returnval){ [ 987.550703] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521016ec-1500-d6b0-b626-1f8763dc491b" [ 987.550703] env[68285]: _type = "Task" [ 987.550703] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.560333] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521016ec-1500-d6b0-b626-1f8763dc491b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.577214] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521e48a9-d163-5513-ea99-5a26738d1d40, 'name': SearchDatastore_Task, 'duration_secs': 0.013455} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.577663] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 987.578077] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 9569d50c-d358-4cc5-a106-32da785e4765/9569d50c-d358-4cc5-a106-32da785e4765.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 987.578556] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11e7793d-c1f2-404c-8133-74402860c654 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.585975] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 987.585975] env[68285]: value = "task-2891709" [ 987.585975] env[68285]: _type = "Task" [ 987.585975] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.594655] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891709, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.757442] env[68285]: DEBUG nova.network.neutron [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Updated VIF entry in instance network info cache for port 08074d01-1b01-4ca8-a5ca-f427c8ec414e. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 987.757442] env[68285]: DEBUG nova.network.neutron [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Updating instance_info_cache with network_info: [{"id": "08074d01-1b01-4ca8-a5ca-f427c8ec414e", "address": "fa:16:3e:b4:2b:1e", "network": {"id": "231b12a1-4634-4ad8-914a-4a74de128049", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-548125967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b390e5b4080a4984a3f935e9e6a0dd2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08074d01-1b", "ovs_interfaceid": "08074d01-1b01-4ca8-a5ca-f427c8ec414e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.945044] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891707, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.029884] env[68285]: DEBUG nova.scheduler.client.report [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 988.080640] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521016ec-1500-d6b0-b626-1f8763dc491b, 'name': SearchDatastore_Task, 'duration_secs': 0.014597} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.080640] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.080640] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 988.080640] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.080640] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.080640] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 988.080640] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a626de3-d379-41da-a00b-f1c14ff91679 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.083609] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 988.083609] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 988.083609] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-994dcfb3-db0e-42af-8b24-2c677bb1e361 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.093315] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Waiting for the task: (returnval){ [ 988.093315] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f2b615-6de3-3a0f-9a0a-96effc905c60" [ 988.093315] env[68285]: _type = "Task" [ 988.093315] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.101522] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891709, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.108661] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f2b615-6de3-3a0f-9a0a-96effc905c60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.260318] env[68285]: DEBUG oslo_concurrency.lockutils [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] Releasing lock "refresh_cache-9569d50c-d358-4cc5-a106-32da785e4765" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.260318] env[68285]: DEBUG nova.compute.manager [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Received event network-changed-ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 988.260318] env[68285]: DEBUG nova.compute.manager [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Refreshing instance network info cache due to event network-changed-ee14be75-4848-4471-9d06-29e7a06446fd. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 988.260318] env[68285]: DEBUG oslo_concurrency.lockutils [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] Acquiring lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.260318] env[68285]: DEBUG oslo_concurrency.lockutils [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] Acquired lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.261526] env[68285]: DEBUG nova.network.neutron [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Refreshing network info cache for port ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 988.446041] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891707, 'name': CreateVM_Task, 'duration_secs': 1.41094} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.446041] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 988.446568] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.446750] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.447078] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 988.447367] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bf80610-fea6-4da7-acd0-7f98174e4bc0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.452482] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 988.452482] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]526bf4cc-2ebd-0c99-41da-72027889fc6f" [ 988.452482] env[68285]: _type = "Task" [ 988.452482] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.460926] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]526bf4cc-2ebd-0c99-41da-72027889fc6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.534772] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.689s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.535379] env[68285]: DEBUG nova.compute.manager [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 988.539303] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.201s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.540026] env[68285]: DEBUG nova.objects.instance [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lazy-loading 'resources' on Instance uuid 2a1cc678-2bb2-403e-b6e8-afdeb8362eac {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.600898] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891709, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572888} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.601921] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 9569d50c-d358-4cc5-a106-32da785e4765/9569d50c-d358-4cc5-a106-32da785e4765.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 988.602099] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 988.602239] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc074a0b-f3ec-4fbd-aaae-a64c7878d658 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.607627] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f2b615-6de3-3a0f-9a0a-96effc905c60, 'name': SearchDatastore_Task, 'duration_secs': 0.052042} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.609127] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5d6f9b6-acdc-4c65-bf16-a28bf9231a45 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.613967] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Waiting for the task: (returnval){ [ 988.613967] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5275b20a-5b12-443f-450b-69ec83663804" [ 988.613967] env[68285]: _type = "Task" [ 988.613967] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.618681] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 988.618681] env[68285]: value = "task-2891710" [ 988.618681] env[68285]: _type = "Task" [ 988.618681] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.625507] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5275b20a-5b12-443f-450b-69ec83663804, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.630183] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891710, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.922282] env[68285]: DEBUG nova.compute.manager [req-4293f94f-f0cd-4b7d-bc65-43907361b094 req-683d9fe1-e91e-4207-b6d6-14db3d1ebbb1 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Received event network-changed-ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 988.922517] env[68285]: DEBUG nova.compute.manager [req-4293f94f-f0cd-4b7d-bc65-43907361b094 req-683d9fe1-e91e-4207-b6d6-14db3d1ebbb1 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Refreshing instance network info cache due to event network-changed-ee14be75-4848-4471-9d06-29e7a06446fd. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 988.922723] env[68285]: DEBUG oslo_concurrency.lockutils [req-4293f94f-f0cd-4b7d-bc65-43907361b094 req-683d9fe1-e91e-4207-b6d6-14db3d1ebbb1 service nova] Acquiring lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.964526] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]526bf4cc-2ebd-0c99-41da-72027889fc6f, 'name': SearchDatastore_Task, 'duration_secs': 0.013148} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.964901] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.965137] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 988.965377] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.046522] env[68285]: DEBUG nova.compute.utils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 989.049550] env[68285]: DEBUG nova.compute.manager [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 989.049747] env[68285]: DEBUG nova.network.neutron [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 989.114036] env[68285]: DEBUG nova.policy [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11e000558c8a44878a90add053bc4a70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c212f8fe09c041209a51099ad3af16d5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 989.141766] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5275b20a-5b12-443f-450b-69ec83663804, 'name': SearchDatastore_Task, 'duration_secs': 0.013898} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.146358] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.146719] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8a848ec8-1ae0-4437-be4f-49219214d11f/8a848ec8-1ae0-4437-be4f-49219214d11f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 989.147076] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891710, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089608} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.150478] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.150733] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 989.150994] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ee9302a-03b1-4b84-a126-4c951421b3fa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.153749] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 989.154312] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-474e29d6-e194-477d-b7ef-c92a998235b3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.156751] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535d6113-5abe-4619-bd2b-a9b97d24bbeb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.185236] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 9569d50c-d358-4cc5-a106-32da785e4765/9569d50c-d358-4cc5-a106-32da785e4765.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.188062] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab80be59-8700-4d09-b956-b158bfe5776e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.204928] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Waiting for the task: (returnval){ [ 989.204928] env[68285]: value = "task-2891711" [ 989.204928] env[68285]: _type = "Task" [ 989.204928] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.205261] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 989.205424] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 989.207121] env[68285]: DEBUG nova.network.neutron [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Updated VIF entry in instance network info cache for port ee14be75-4848-4471-9d06-29e7a06446fd. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 989.207476] env[68285]: DEBUG nova.network.neutron [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Updating instance_info_cache with network_info: [{"id": "ee14be75-4848-4471-9d06-29e7a06446fd", "address": "fa:16:3e:16:c8:ee", "network": {"id": "dab36320-0163-4a17-8e23-ccb4a6db67a2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-677118867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "31396f8bc32b48e883ef6bd7c38ad3c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee14be75-48", "ovs_interfaceid": "ee14be75-4848-4471-9d06-29e7a06446fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.210713] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81c9d2a2-156c-4b50-864e-e31f29d356b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.220008] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 989.220008] env[68285]: value = "task-2891712" [ 989.220008] env[68285]: _type = "Task" [ 989.220008] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.228781] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891711, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.229138] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 989.229138] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d4cd5e-29ef-76b1-a730-110e2eabdf3f" [ 989.229138] env[68285]: _type = "Task" [ 989.229138] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.243096] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891712, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.247714] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d4cd5e-29ef-76b1-a730-110e2eabdf3f, 'name': SearchDatastore_Task, 'duration_secs': 0.013456} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.253694] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d11aa6f8-ba9a-4dcc-a274-2610ea593ae1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.259768] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 989.259768] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f2cdae-818c-3120-5dca-99bae0e999e0" [ 989.259768] env[68285]: _type = "Task" [ 989.259768] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.269008] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f2cdae-818c-3120-5dca-99bae0e999e0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.459953] env[68285]: DEBUG nova.network.neutron [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Successfully created port: efc7ff98-8b15-4f2b-9c65-16a914ff393a {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 989.552493] env[68285]: DEBUG nova.compute.manager [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 989.658073] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f46053-3275-4b47-a170-a64ffe1f8e90 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.663480] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db0e818-e78e-4299-9b94-956c1779681b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.696369] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-164ed05b-f86f-4aef-898d-31d521a93156 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.705660] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48cd3fac-163e-4196-a12e-274b505c8937 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.714375] env[68285]: DEBUG oslo_concurrency.lockutils [req-12c76dc0-72bf-4ce4-aed2-b544758b2986 req-9a2ba4f2-0ad4-431d-90d3-f2fac7aed3a5 service nova] Releasing lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.715258] env[68285]: DEBUG oslo_concurrency.lockutils [req-4293f94f-f0cd-4b7d-bc65-43907361b094 req-683d9fe1-e91e-4207-b6d6-14db3d1ebbb1 service nova] Acquired lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.715258] env[68285]: DEBUG nova.network.neutron [req-4293f94f-f0cd-4b7d-bc65-43907361b094 req-683d9fe1-e91e-4207-b6d6-14db3d1ebbb1 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Refreshing network info cache for port ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 989.735052] env[68285]: DEBUG nova.compute.provider_tree [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.743410] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 
tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891711, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.750190] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891712, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.771868] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f2cdae-818c-3120-5dca-99bae0e999e0, 'name': SearchDatastore_Task, 'duration_secs': 0.014808} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.772185] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.772510] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] f1b8808d-c3a1-4be6-b6ec-ed441291e8f2/f1b8808d-c3a1-4be6-b6ec-ed441291e8f2.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 989.772835] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e722b9fc-4dff-43c4-b190-e9c41105b686 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.782070] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 989.782070] env[68285]: value = "task-2891713" [ 989.782070] env[68285]: _type = "Task" [ 989.782070] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.790354] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891713, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.224782] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891711, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.80167} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.225907] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8a848ec8-1ae0-4437-be4f-49219214d11f/8a848ec8-1ae0-4437-be4f-49219214d11f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 990.227449] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 990.227449] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f795ed4f-1bc4-421f-b896-4566dcd4d1e2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.240475] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Waiting for the task: (returnval){ [ 990.240475] env[68285]: value = "task-2891714" [ 990.240475] env[68285]: _type = "Task" [ 990.240475] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.249397] env[68285]: DEBUG nova.scheduler.client.report [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 990.256933] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891712, 'name': ReconfigVM_Task, 'duration_secs': 0.817933} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.258055] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 9569d50c-d358-4cc5-a106-32da785e4765/9569d50c-d358-4cc5-a106-32da785e4765.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.258763] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c1ed68f8-cc0f-47c0-b3d2-1bfcd7a02386 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.267086] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891714, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.274760] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 990.274760] env[68285]: value = "task-2891715" [ 990.274760] env[68285]: _type = "Task" [ 990.274760] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.289803] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891715, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.296253] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891713, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.563373] env[68285]: DEBUG nova.compute.manager [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 990.597605] env[68285]: DEBUG nova.virt.hardware [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 990.597843] env[68285]: DEBUG nova.virt.hardware [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 990.598672] env[68285]: DEBUG nova.virt.hardware [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 990.598672] env[68285]: DEBUG nova.virt.hardware [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 990.598672] env[68285]: DEBUG nova.virt.hardware [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 990.598845] env[68285]: DEBUG nova.virt.hardware [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 990.599122] env[68285]: DEBUG nova.virt.hardware [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 990.599314] env[68285]: DEBUG nova.virt.hardware [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 990.599514] env[68285]: DEBUG nova.virt.hardware [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 990.599709] env[68285]: DEBUG nova.virt.hardware [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 990.600281] env[68285]: DEBUG nova.virt.hardware [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 990.601214] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674136b9-320d-405e-9191-9836e3abf79b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.609867] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce52cb9-4141-469b-b061-f338674971d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.695264] env[68285]: DEBUG nova.network.neutron [req-4293f94f-f0cd-4b7d-bc65-43907361b094 req-683d9fe1-e91e-4207-b6d6-14db3d1ebbb1 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Updated VIF entry in instance network info cache for port ee14be75-4848-4471-9d06-29e7a06446fd. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 990.695762] env[68285]: DEBUG nova.network.neutron [req-4293f94f-f0cd-4b7d-bc65-43907361b094 req-683d9fe1-e91e-4207-b6d6-14db3d1ebbb1 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Updating instance_info_cache with network_info: [{"id": "ee14be75-4848-4471-9d06-29e7a06446fd", "address": "fa:16:3e:16:c8:ee", "network": {"id": "dab36320-0163-4a17-8e23-ccb4a6db67a2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-677118867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "31396f8bc32b48e883ef6bd7c38ad3c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee14be75-48", "ovs_interfaceid": "ee14be75-4848-4471-9d06-29e7a06446fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.751648] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891714, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083631} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.751959] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 990.752877] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175d6129-b8e4-4248-888f-8c697116e6fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.765580] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.226s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.776794] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 8a848ec8-1ae0-4437-be4f-49219214d11f/8a848ec8-1ae0-4437-be4f-49219214d11f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 990.777468] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 42.526s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.777657] env[68285]: DEBUG nova.objects.instance [None req-0850a2b3-de81-4302-a372-80b9448b1760 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 990.780834] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-705f01fc-6f96-4f53-83b5-3ee7824d5546 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.799926] env[68285]: INFO nova.scheduler.client.report [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleted allocations for instance 2a1cc678-2bb2-403e-b6e8-afdeb8362eac [ 990.809755] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Waiting for the task: (returnval){ [ 990.809755] env[68285]: value = "task-2891716" [ 990.809755] env[68285]: _type = "Task" [ 990.809755] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.813332] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891713, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.701247} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.820855] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] f1b8808d-c3a1-4be6-b6ec-ed441291e8f2/f1b8808d-c3a1-4be6-b6ec-ed441291e8f2.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 990.821123] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 990.821425] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891715, 'name': Rename_Task, 'duration_secs': 0.492712} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.822682] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25863866-e7ce-44fa-b4b7-1df4704f4def {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.824711] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 990.825606] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6b69303-0e7c-41bb-9537-d65cca1e281a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.833547] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891716, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.838356] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 990.838356] env[68285]: value = "task-2891717" [ 990.838356] env[68285]: _type = "Task" [ 990.838356] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.839776] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 990.839776] env[68285]: value = "task-2891718" [ 990.839776] env[68285]: _type = "Task" [ 990.839776] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.853696] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891717, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.857837] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891718, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.017192] env[68285]: DEBUG nova.network.neutron [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Successfully updated port: efc7ff98-8b15-4f2b-9c65-16a914ff393a {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 991.201865] env[68285]: DEBUG oslo_concurrency.lockutils [req-4293f94f-f0cd-4b7d-bc65-43907361b094 req-683d9fe1-e91e-4207-b6d6-14db3d1ebbb1 service nova] Releasing lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.312207] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a82abb1a-8d6f-4527-87bf-e89d651b8cd3 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "2a1cc678-2bb2-403e-b6e8-afdeb8362eac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.731s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.330327] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891716, 'name': ReconfigVM_Task, 'duration_secs': 0.418843} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.330327] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 8a848ec8-1ae0-4437-be4f-49219214d11f/8a848ec8-1ae0-4437-be4f-49219214d11f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 991.334143] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a032f7a-53a1-4728-9d8d-fe50637572f0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.338779] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Waiting for the task: (returnval){ [ 991.338779] env[68285]: value = "task-2891719" [ 991.338779] env[68285]: _type = "Task" [ 991.338779] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.356928] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891719, 'name': Rename_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.360388] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891717, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094548} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.360630] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 991.361510] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590e121d-6061-4213-99c3-e9d43d436b0c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.368139] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891718, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.370659] env[68285]: DEBUG nova.compute.manager [req-7856559b-1268-424f-9392-2c13e50df99f req-60fbaece-a425-4ecf-84bf-34b8c611fa10 service nova] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Received event network-vif-plugged-efc7ff98-8b15-4f2b-9c65-16a914ff393a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 991.370841] env[68285]: DEBUG oslo_concurrency.lockutils [req-7856559b-1268-424f-9392-2c13e50df99f req-60fbaece-a425-4ecf-84bf-34b8c611fa10 service nova] Acquiring lock "64103f25-6411-44be-a60f-b9c276dba331-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.371029] env[68285]: DEBUG oslo_concurrency.lockutils [req-7856559b-1268-424f-9392-2c13e50df99f req-60fbaece-a425-4ecf-84bf-34b8c611fa10 service nova] Lock "64103f25-6411-44be-a60f-b9c276dba331-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.371228] env[68285]: DEBUG oslo_concurrency.lockutils [req-7856559b-1268-424f-9392-2c13e50df99f req-60fbaece-a425-4ecf-84bf-34b8c611fa10 service nova] Lock "64103f25-6411-44be-a60f-b9c276dba331-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.371347] env[68285]: DEBUG nova.compute.manager [req-7856559b-1268-424f-9392-2c13e50df99f req-60fbaece-a425-4ecf-84bf-34b8c611fa10 service nova] [instance: 64103f25-6411-44be-a60f-b9c276dba331] No waiting events found dispatching network-vif-plugged-efc7ff98-8b15-4f2b-9c65-16a914ff393a {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 991.371534] env[68285]: WARNING nova.compute.manager [req-7856559b-1268-424f-9392-2c13e50df99f req-60fbaece-a425-4ecf-84bf-34b8c611fa10 service nova] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Received unexpected event network-vif-plugged-efc7ff98-8b15-4f2b-9c65-16a914ff393a for instance with vm_state building and task_state spawning. [ 991.391818] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] f1b8808d-c3a1-4be6-b6ec-ed441291e8f2/f1b8808d-c3a1-4be6-b6ec-ed441291e8f2.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 991.392639] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3073a41-ec81-4cee-9fa8-ebe1054ebe44 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.415210] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 991.415210] env[68285]: value = "task-2891720" [ 991.415210] env[68285]: _type = "Task" [ 991.415210] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.425227] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891720, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.460248] env[68285]: DEBUG nova.compute.manager [req-73f320c5-3d43-46f6-9433-868492cab805 req-d2b60d61-1101-4c2f-90a1-439ba9f8b779 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Received event network-changed-ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 991.460479] env[68285]: DEBUG nova.compute.manager [req-73f320c5-3d43-46f6-9433-868492cab805 req-d2b60d61-1101-4c2f-90a1-439ba9f8b779 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Refreshing instance network info cache due to event network-changed-ee14be75-4848-4471-9d06-29e7a06446fd. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 991.460696] env[68285]: DEBUG oslo_concurrency.lockutils [req-73f320c5-3d43-46f6-9433-868492cab805 req-d2b60d61-1101-4c2f-90a1-439ba9f8b779 service nova] Acquiring lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.460880] env[68285]: DEBUG oslo_concurrency.lockutils [req-73f320c5-3d43-46f6-9433-868492cab805 req-d2b60d61-1101-4c2f-90a1-439ba9f8b779 service nova] Acquired lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.461086] env[68285]: DEBUG nova.network.neutron [req-73f320c5-3d43-46f6-9433-868492cab805 req-d2b60d61-1101-4c2f-90a1-439ba9f8b779 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Refreshing network info cache for port ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 991.520587] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "refresh_cache-64103f25-6411-44be-a60f-b9c276dba331" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.520830] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquired lock "refresh_cache-64103f25-6411-44be-a60f-b9c276dba331" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.521054] env[68285]: DEBUG nova.network.neutron [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 991.802204] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0850a2b3-de81-4302-a372-80b9448b1760 
tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.025s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.803276] env[68285]: DEBUG oslo_concurrency.lockutils [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.637s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.803495] env[68285]: DEBUG nova.objects.instance [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lazy-loading 'resources' on Instance uuid c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 991.851884] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891719, 'name': Rename_Task, 'duration_secs': 0.251801} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.854986] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 991.855262] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59e63080-aad9-477d-b9c6-66acb64a3e7b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.861710] env[68285]: DEBUG oslo_vmware.api [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891718, 'name': PowerOnVM_Task, 'duration_secs': 0.795571} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.863050] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 991.863264] env[68285]: INFO nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Took 8.35 seconds to spawn the instance on the hypervisor. 
[ 991.863440] env[68285]: DEBUG nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 991.863755] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Waiting for the task: (returnval){ [ 991.863755] env[68285]: value = "task-2891721" [ 991.863755] env[68285]: _type = "Task" [ 991.863755] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.864455] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b3180c-9709-4faf-91a3-43d1c0a4b5be {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.886251] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891721, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.925292] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891720, 'name': ReconfigVM_Task, 'duration_secs': 0.284938} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.925520] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Reconfigured VM instance instance-00000039 to attach disk [datastore1] f1b8808d-c3a1-4be6-b6ec-ed441291e8f2/f1b8808d-c3a1-4be6-b6ec-ed441291e8f2.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 991.926174] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f7c766ec-a692-4ff4-97af-c9cd35035c34 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.934320] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 991.934320] env[68285]: value = "task-2891722" [ 991.934320] env[68285]: _type = "Task" [ 991.934320] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.943924] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891722, 'name': Rename_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.081946] env[68285]: DEBUG nova.network.neutron [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 992.182082] env[68285]: DEBUG nova.network.neutron [req-73f320c5-3d43-46f6-9433-868492cab805 req-d2b60d61-1101-4c2f-90a1-439ba9f8b779 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Updated VIF entry in instance network info cache for port ee14be75-4848-4471-9d06-29e7a06446fd. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 992.182471] env[68285]: DEBUG nova.network.neutron [req-73f320c5-3d43-46f6-9433-868492cab805 req-d2b60d61-1101-4c2f-90a1-439ba9f8b779 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Updating instance_info_cache with network_info: [{"id": "ee14be75-4848-4471-9d06-29e7a06446fd", "address": "fa:16:3e:16:c8:ee", "network": {"id": "dab36320-0163-4a17-8e23-ccb4a6db67a2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-677118867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "31396f8bc32b48e883ef6bd7c38ad3c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee14be75-48", "ovs_interfaceid": "ee14be75-4848-4471-9d06-29e7a06446fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.339242] env[68285]: DEBUG nova.network.neutron [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Updating instance_info_cache with network_info: [{"id": "efc7ff98-8b15-4f2b-9c65-16a914ff393a", "address": "fa:16:3e:71:71:1f", "network": {"id": "29140596-472d-439e-878e-bfff12ffdf03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-909517823-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c212f8fe09c041209a51099ad3af16d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b89fd3b-0470-40c9-bb5b-d52c76c030e4", "external-id": 
"nsx-vlan-transportzone-276", "segmentation_id": 276, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc7ff98-8b", "ovs_interfaceid": "efc7ff98-8b15-4f2b-9c65-16a914ff393a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.379290] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891721, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.397426] env[68285]: INFO nova.compute.manager [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Took 54.69 seconds to build instance. [ 992.448309] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891722, 'name': Rename_Task, 'duration_secs': 0.14353} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.448533] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 992.448981] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a0dc747-0118-4896-8d72-af58c6725ca7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.456077] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 992.456077] env[68285]: value = "task-2891723" [ 992.456077] env[68285]: _type = "Task" [ 992.456077] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.467992] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891723, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.686577] env[68285]: DEBUG oslo_concurrency.lockutils [req-73f320c5-3d43-46f6-9433-868492cab805 req-d2b60d61-1101-4c2f-90a1-439ba9f8b779 service nova] Releasing lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.807835] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1cd099-c766-4542-b873-44a9d5bc1b9a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.815385] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a63ab49-fc95-4895-ac89-9e4d6a1684e1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.850159] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Releasing lock "refresh_cache-64103f25-6411-44be-a60f-b9c276dba331" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.850516] env[68285]: DEBUG nova.compute.manager [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Instance network_info: |[{"id": "efc7ff98-8b15-4f2b-9c65-16a914ff393a", "address": "fa:16:3e:71:71:1f", "network": {"id": "29140596-472d-439e-878e-bfff12ffdf03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-909517823-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c212f8fe09c041209a51099ad3af16d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b89fd3b-0470-40c9-bb5b-d52c76c030e4", "external-id": "nsx-vlan-transportzone-276", "segmentation_id": 276, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc7ff98-8b", "ovs_interfaceid": "efc7ff98-8b15-4f2b-9c65-16a914ff393a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 992.851255] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:71:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b89fd3b-0470-40c9-bb5b-d52c76c030e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'efc7ff98-8b15-4f2b-9c65-16a914ff393a', 'vif_model': 'vmxnet3'}] {{(pid=68285) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 992.859135] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 992.859863] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08c151e-8d79-4ecc-9ca5-a001f63c9c05 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.863233] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 992.863233] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ba1a47f-048c-4871-a96e-79bda220b31b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.886264] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68307c5c-b568-4e08-a533-8f404541c4a2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.894277] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 992.894277] env[68285]: value = "task-2891724" [ 992.894277] env[68285]: _type = "Task" [ 992.894277] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.894589] env[68285]: DEBUG oslo_vmware.api [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Task: {'id': task-2891721, 'name': PowerOnVM_Task, 'duration_secs': 0.80286} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.895351] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 992.895576] env[68285]: DEBUG nova.compute.manager [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 992.899893] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128203fe-c465-42fa-be2c-2db731b66f71 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.912916] env[68285]: DEBUG oslo_concurrency.lockutils [None req-40e148a8-5099-494f-8e36-eb7c7f4bdf84 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "9569d50c-d358-4cc5-a106-32da785e4765" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.221s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.915109] env[68285]: DEBUG nova.compute.provider_tree [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.922330] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891724, 'name': CreateVM_Task} progress is 15%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.971785] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891723, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.404808] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891724, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.418424] env[68285]: DEBUG nova.scheduler.client.report [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 993.434785] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.473156] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891723, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.519758] env[68285]: DEBUG oslo_concurrency.lockutils [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.520140] env[68285]: DEBUG oslo_concurrency.lockutils [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.798554] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c7fa7806-a4c7-4e2e-a3d1-e744ef3de607 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "c090592b-4bd4-423b-b5b9-68a2c220e388" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.798841] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c7fa7806-a4c7-4e2e-a3d1-e744ef3de607 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "c090592b-4bd4-423b-b5b9-68a2c220e388" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.801026] env[68285]: DEBUG nova.compute.manager 
[req-ffcaaae4-24c3-4f15-ab5e-1273dcc88f57 req-b167b721-c2e8-4f67-8ff6-4449eedf7167 service nova] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Received event network-changed-efc7ff98-8b15-4f2b-9c65-16a914ff393a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 993.801291] env[68285]: DEBUG nova.compute.manager [req-ffcaaae4-24c3-4f15-ab5e-1273dcc88f57 req-b167b721-c2e8-4f67-8ff6-4449eedf7167 service nova] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Refreshing instance network info cache due to event network-changed-efc7ff98-8b15-4f2b-9c65-16a914ff393a. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 993.801522] env[68285]: DEBUG oslo_concurrency.lockutils [req-ffcaaae4-24c3-4f15-ab5e-1273dcc88f57 req-b167b721-c2e8-4f67-8ff6-4449eedf7167 service nova] Acquiring lock "refresh_cache-64103f25-6411-44be-a60f-b9c276dba331" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.801687] env[68285]: DEBUG oslo_concurrency.lockutils [req-ffcaaae4-24c3-4f15-ab5e-1273dcc88f57 req-b167b721-c2e8-4f67-8ff6-4449eedf7167 service nova] Acquired lock "refresh_cache-64103f25-6411-44be-a60f-b9c276dba331" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.801898] env[68285]: DEBUG nova.network.neutron [req-ffcaaae4-24c3-4f15-ab5e-1273dcc88f57 req-b167b721-c2e8-4f67-8ff6-4449eedf7167 service nova] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Refreshing network info cache for port efc7ff98-8b15-4f2b-9c65-16a914ff393a {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 993.874606] env[68285]: DEBUG nova.compute.manager [req-c3145e49-f170-41b4-8ec7-2d7ef330944f req-e7971a10-f567-471a-b134-09938f0a90e0 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Received event network-changed-ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 993.874808] env[68285]: DEBUG nova.compute.manager [req-c3145e49-f170-41b4-8ec7-2d7ef330944f req-e7971a10-f567-471a-b134-09938f0a90e0 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Refreshing instance network info cache due to event network-changed-ee14be75-4848-4471-9d06-29e7a06446fd. 
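The scheduler report a few entries above confirms the provider inventory is unchanged; the effective schedulable capacity follows directly from the logged totals, reserved amounts and allocation ratios, e.g. VCPU: (48 − 0) × 4.0 = 192, while max_unit (16 VCPU, 65530 MB, 161 GB) caps any single allocation. A small sketch of that standard placement arithmetic using the exact payload from the log:

```python
# Inventory payload as logged for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def schedulable(inv):
    # Placement capacity: (total - reserved) * allocation_ratio per resource class.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(schedulable(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```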
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 993.875064] env[68285]: DEBUG oslo_concurrency.lockutils [req-c3145e49-f170-41b4-8ec7-2d7ef330944f req-e7971a10-f567-471a-b134-09938f0a90e0 service nova] Acquiring lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.875223] env[68285]: DEBUG oslo_concurrency.lockutils [req-c3145e49-f170-41b4-8ec7-2d7ef330944f req-e7971a10-f567-471a-b134-09938f0a90e0 service nova] Acquired lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.875384] env[68285]: DEBUG nova.network.neutron [req-c3145e49-f170-41b4-8ec7-2d7ef330944f req-e7971a10-f567-471a-b134-09938f0a90e0 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Refreshing network info cache for port ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 993.876769] env[68285]: DEBUG oslo_concurrency.lockutils [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquiring lock "7bef3e2a-00ab-480a-aa8c-335635ee5d31" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.876983] env[68285]: DEBUG oslo_concurrency.lockutils [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Lock "7bef3e2a-00ab-480a-aa8c-335635ee5d31" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.877346] env[68285]: DEBUG oslo_concurrency.lockutils [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquiring lock "7bef3e2a-00ab-480a-aa8c-335635ee5d31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.877549] env[68285]: DEBUG oslo_concurrency.lockutils [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Lock "7bef3e2a-00ab-480a-aa8c-335635ee5d31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.877718] env[68285]: DEBUG oslo_concurrency.lockutils [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Lock "7bef3e2a-00ab-480a-aa8c-335635ee5d31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.879632] env[68285]: INFO nova.compute.manager [None 
req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Terminating instance [ 993.905861] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891724, 'name': CreateVM_Task, 'duration_secs': 0.688258} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.906042] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 993.906734] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.906928] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.907276] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 993.907576] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54726922-de05-43b0-be60-5d992b18b5de {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.913268] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 993.913268] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5236accf-bcd6-b09c-6489-ae26b9480c50" [ 993.913268] env[68285]: _type = "Task" [ 993.913268] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.921951] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5236accf-bcd6-b09c-6489-ae26b9480c50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.923787] env[68285]: DEBUG oslo_concurrency.lockutils [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.120s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.925894] env[68285]: DEBUG oslo_concurrency.lockutils [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.563s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.926137] env[68285]: DEBUG nova.objects.instance [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lazy-loading 'resources' on Instance uuid 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 993.950797] env[68285]: INFO nova.scheduler.client.report [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Deleted allocations for instance c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e [ 993.969996] env[68285]: DEBUG oslo_vmware.api [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891723, 'name': PowerOnVM_Task, 'duration_secs': 1.084866} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.970590] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 993.970833] env[68285]: DEBUG nova.compute.manager [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 993.971700] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5874035-ce65-4e4c-a716-423ccf000106 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.992419] env[68285]: DEBUG oslo_vmware.rw_handles [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525bcb28-2a2c-11ba-f4f4-1c1cf6f62151/disk-0.vmdk. 
{{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 993.993419] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5086f119-b178-4f52-ae29-01cf67bee1c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.000377] env[68285]: DEBUG oslo_vmware.rw_handles [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525bcb28-2a2c-11ba-f4f4-1c1cf6f62151/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 994.000553] env[68285]: ERROR oslo_vmware.rw_handles [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525bcb28-2a2c-11ba-f4f4-1c1cf6f62151/disk-0.vmdk due to incomplete transfer. [ 994.001388] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2e96ed62-f93f-4e58-81fb-59f46863d1e1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.009198] env[68285]: DEBUG oslo_vmware.rw_handles [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525bcb28-2a2c-11ba-f4f4-1c1cf6f62151/disk-0.vmdk. {{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 994.009387] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Uploaded image 97ca943a-cd9b-44f0-83a3-f7bd725d6e49 to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 994.014161] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 994.014161] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-316fed08-932d-4819-ab5a-e3fae5e0f318 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.020083] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 994.020083] env[68285]: value = "task-2891725" [ 994.020083] env[68285]: _type = "Task" [ 994.020083] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.031553] env[68285]: INFO nova.compute.manager [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Detaching volume f568d87f-424d-4432-b017-6f7542b87545 [ 994.033315] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891725, 'name': Destroy_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.071508] env[68285]: INFO nova.virt.block_device [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Attempting to driver detach volume f568d87f-424d-4432-b017-6f7542b87545 from mountpoint /dev/sdb [ 994.071693] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Volume detach. Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 994.071929] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580917', 'volume_id': 'f568d87f-424d-4432-b017-6f7542b87545', 'name': 'volume-f568d87f-424d-4432-b017-6f7542b87545', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5e101d74-7a82-4118-8f4c-7af9a6b0917a', 'attached_at': '', 'detached_at': '', 'volume_id': 'f568d87f-424d-4432-b017-6f7542b87545', 'serial': 'f568d87f-424d-4432-b017-6f7542b87545'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 994.074748] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad04119c-c838-4c27-89e7-1693bcb41ca3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.102346] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287d14a4-644e-4c51-a6cf-effd368f8cd5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.110188] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f7b6e0-8340-4d40-a0cf-f8b8655284e0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.133524] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c313fd4-a7a9-4da4-a83f-b0e4bd25cbf4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.149954] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-687fb527-5f79-406b-930b-b14c8b75356e 
tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] The volume has not been displaced from its original location: [datastore1] volume-f568d87f-424d-4432-b017-6f7542b87545/volume-f568d87f-424d-4432-b017-6f7542b87545.vmdk. No consolidation needed. {{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 994.155297] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Reconfiguring VM instance instance-00000017 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 994.155649] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd4f09ed-7c46-4e73-9944-a224bee49a71 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.174282] env[68285]: DEBUG oslo_vmware.api [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 994.174282] env[68285]: value = "task-2891726" [ 994.174282] env[68285]: _type = "Task" [ 994.174282] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.182924] env[68285]: DEBUG oslo_vmware.api [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891726, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.304791] env[68285]: DEBUG nova.compute.manager [None req-c7fa7806-a4c7-4e2e-a3d1-e744ef3de607 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: c090592b-4bd4-423b-b5b9-68a2c220e388] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 994.384571] env[68285]: DEBUG nova.compute.manager [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Start destroying the instance on the hypervisor. 
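The _detach_volume_vmdk entry above logs the complete connection_info used for the detach. A small illustrative snippet (not Nova code) that pulls out the fields a VMDK detach actually operates on, using the values logged for instance 5e101d74-7a82-4118-8f4c-7af9a6b0917a:

```python
# Abbreviated copy of the connection_info logged above.
connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-580917',   # shadow VM that owns the volume's backing disk
        'volume_id': 'f568d87f-424d-4432-b017-6f7542b87545',
        'name': 'volume-f568d87f-424d-4432-b017-6f7542b87545',
        'access_mode': 'rw',
        'encrypted': False,
    },
}

def vmdk_detach_params(info):
    """Return (shadow VM moref value, volume id, read_only) -- illustrative only."""
    if info['driver_volume_type'] != 'vmdk':
        raise ValueError('not a vmdk attachment')
    data = info['data']
    return data['volume'], data['volume_id'], data['access_mode'] == 'ro'

print(vmdk_detach_params(connection_info))
```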
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 994.384811] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 994.385794] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bdf58a-3910-4248-b5fa-d21b43359edc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.399926] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 994.400167] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a046667c-73ba-4d81-a328-33e3e2598a4b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.407632] env[68285]: DEBUG oslo_vmware.api [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 994.407632] env[68285]: value = "task-2891728" [ 994.407632] env[68285]: _type = "Task" [ 994.407632] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.416493] env[68285]: DEBUG oslo_vmware.api [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891728, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.426010] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5236accf-bcd6-b09c-6489-ae26b9480c50, 'name': SearchDatastore_Task, 'duration_secs': 0.009943} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.428657] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.428893] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 994.429193] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.429346] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 994.429556] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 994.434972] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-896d8b57-6eb4-4486-bf82-43f0cd1c297d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.439386] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquiring lock "8a848ec8-1ae0-4437-be4f-49219214d11f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.439626] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Lock "8a848ec8-1ae0-4437-be4f-49219214d11f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.439816] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 
tempest-ServersAdmin275Test-205570985-project-member] Acquiring lock "8a848ec8-1ae0-4437-be4f-49219214d11f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.440029] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Lock "8a848ec8-1ae0-4437-be4f-49219214d11f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.440274] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Lock "8a848ec8-1ae0-4437-be4f-49219214d11f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.443126] env[68285]: INFO nova.compute.manager [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Terminating instance [ 994.445645] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 994.445817] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 994.447847] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de48a435-210d-4e72-b31f-6cd27f30f32c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.453109] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 994.453109] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ab9c60-8569-db3d-46c0-e257472dd431" [ 994.453109] env[68285]: _type = "Task" [ 994.453109] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.461968] env[68285]: DEBUG oslo_concurrency.lockutils [None req-389b9cc2-450e-41c7-acd5-d1cf36e4cd69 tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.985s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.474773] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ab9c60-8569-db3d-46c0-e257472dd431, 'name': SearchDatastore_Task, 'duration_secs': 0.017827} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.481678] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0833bb84-d87d-4863-bf47-665f2955bdfb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.493180] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 994.493180] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]528d5708-a9c7-5f40-2381-09c238cde884" [ 994.493180] env[68285]: _type = "Task" [ 994.493180] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.496119] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.508139] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528d5708-a9c7-5f40-2381-09c238cde884, 'name': SearchDatastore_Task, 'duration_secs': 0.010994} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.508416] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.508684] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 64103f25-6411-44be-a60f-b9c276dba331/64103f25-6411-44be-a60f-b9c276dba331.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 994.508946] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db8f288f-228c-4ecf-961c-1a3aef6b28b2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.518213] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 994.518213] env[68285]: value = "task-2891729" [ 994.518213] env[68285]: _type = "Task" [ 994.518213] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.536534] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891729, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.540304] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891725, 'name': Destroy_Task, 'duration_secs': 0.353456} completed successfully. 
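The copy logged above moves the cached image VMDK from devstack-image-cache_base into the new instance's directory on the same datastore, as a CopyVirtualDisk_Task. A hedged sketch of how such "[datastore] folder/file.vmdk" paths can be built with oslo.vmware's datastore helpers; the commented task invocation assumes a live session like the one sketched earlier and is illustrative, not the driver's exact call:

```python
from oslo_vmware.objects import datastore as ds_obj

image_id = 'ce84ab4c-9913-42dc-b839-714ad2184867'
instance_uuid = '64103f25-6411-44be-a60f-b9c276dba331'

# Source: cached image; destination: per-instance directory (same datastore).
src = ds_obj.DatastorePath('datastore2', 'devstack-image-cache_base',
                           '%s.vmdk' % image_id)
dst = ds_obj.DatastorePath('datastore2', instance_uuid,
                           '%s.vmdk' % instance_uuid)
print(str(src))  # [datastore2] devstack-image-cache_base/ce84ab4c-....vmdk
print(str(dst))  # [datastore2] 64103f25-.../64103f25-....vmdk

# With a live session (see the earlier sketch), the copy itself is a task:
# task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
#                           session.vim.service_content.virtualDiskManager,
#                           sourceName=str(src), destName=str(dst))
# session.wait_for_task(task)
```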
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.540616] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Destroyed the VM [ 994.540863] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 994.541136] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e6558900-554c-4312-9b31-639a849f8267 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.549632] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 994.549632] env[68285]: value = "task-2891730" [ 994.549632] env[68285]: _type = "Task" [ 994.549632] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.559964] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891730, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.578960] env[68285]: DEBUG oslo_concurrency.lockutils [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "32d23c62-23ec-4732-a95d-6ac32805e1b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.580086] env[68285]: DEBUG oslo_concurrency.lockutils [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "32d23c62-23ec-4732-a95d-6ac32805e1b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.580086] env[68285]: DEBUG oslo_concurrency.lockutils [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "32d23c62-23ec-4732-a95d-6ac32805e1b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.580086] env[68285]: DEBUG oslo_concurrency.lockutils [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "32d23c62-23ec-4732-a95d-6ac32805e1b9-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.580086] env[68285]: DEBUG oslo_concurrency.lockutils [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "32d23c62-23ec-4732-a95d-6ac32805e1b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.584959] env[68285]: INFO nova.compute.manager [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Terminating instance [ 994.651968] env[68285]: DEBUG nova.network.neutron [req-c3145e49-f170-41b4-8ec7-2d7ef330944f req-e7971a10-f567-471a-b134-09938f0a90e0 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Updated VIF entry in instance network info cache for port ee14be75-4848-4471-9d06-29e7a06446fd. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 994.652360] env[68285]: DEBUG nova.network.neutron [req-c3145e49-f170-41b4-8ec7-2d7ef330944f req-e7971a10-f567-471a-b134-09938f0a90e0 service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Updating instance_info_cache with network_info: [{"id": "ee14be75-4848-4471-9d06-29e7a06446fd", "address": "fa:16:3e:16:c8:ee", "network": {"id": "dab36320-0163-4a17-8e23-ccb4a6db67a2", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-677118867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "31396f8bc32b48e883ef6bd7c38ad3c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee14be75-48", "ovs_interfaceid": "ee14be75-4848-4471-9d06-29e7a06446fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.691571] env[68285]: DEBUG oslo_vmware.api [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891726, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.707684] env[68285]: DEBUG nova.network.neutron [req-ffcaaae4-24c3-4f15-ab5e-1273dcc88f57 req-b167b721-c2e8-4f67-8ff6-4449eedf7167 service nova] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Updated VIF entry in instance network info cache for port efc7ff98-8b15-4f2b-9c65-16a914ff393a. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 994.707684] env[68285]: DEBUG nova.network.neutron [req-ffcaaae4-24c3-4f15-ab5e-1273dcc88f57 req-b167b721-c2e8-4f67-8ff6-4449eedf7167 service nova] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Updating instance_info_cache with network_info: [{"id": "efc7ff98-8b15-4f2b-9c65-16a914ff393a", "address": "fa:16:3e:71:71:1f", "network": {"id": "29140596-472d-439e-878e-bfff12ffdf03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-909517823-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c212f8fe09c041209a51099ad3af16d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b89fd3b-0470-40c9-bb5b-d52c76c030e4", "external-id": "nsx-vlan-transportzone-276", "segmentation_id": 276, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc7ff98-8b", "ovs_interfaceid": "efc7ff98-8b15-4f2b-9c65-16a914ff393a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.813802] env[68285]: DEBUG nova.compute.manager [None req-c7fa7806-a4c7-4e2e-a3d1-e744ef3de607 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: c090592b-4bd4-423b-b5b9-68a2c220e388] Instance disappeared before build. 
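The refreshed instance_info_cache entries above carry the full per-port network model. A small illustrative snippet extracting the fields most often needed when reading these records (port id, MAC, fixed IPs, MTU, segmentation id), abbreviated from the entry logged for port efc7ff98-8b15-4f2b-9c65-16a914ff393a:

```python
# Abbreviated copy of the VIF entry logged above.
vif = {
    'id': 'efc7ff98-8b15-4f2b-9c65-16a914ff393a',
    'address': 'fa:16:3e:71:71:1f',
    'devname': 'tapefc7ff98-8b',
    'network': {
        'bridge': 'br-int',
        'subnets': [{'cidr': '192.168.128.0/28',
                     'ips': [{'address': '192.168.128.6', 'type': 'fixed'}]}],
        'meta': {'mtu': 8950},
    },
    'details': {'connectivity': 'l2', 'segmentation_id': 276},
}

def summarize_vif(v):
    fixed_ips = [ip['address']
                 for subnet in v['network']['subnets']
                 for ip in subnet['ips'] if ip['type'] == 'fixed']
    return {'port': v['id'], 'mac': v['address'], 'fixed_ips': fixed_ips,
            'mtu': v['network']['meta']['mtu'],
            'segmentation_id': v['details']['segmentation_id']}

print(summarize_vif(vif))
```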
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 994.899205] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "9569d50c-d358-4cc5-a106-32da785e4765" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.899568] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "9569d50c-d358-4cc5-a106-32da785e4765" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.899831] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "9569d50c-d358-4cc5-a106-32da785e4765-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.900170] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "9569d50c-d358-4cc5-a106-32da785e4765-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.901238] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "9569d50c-d358-4cc5-a106-32da785e4765-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.904084] env[68285]: INFO nova.compute.manager [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Terminating instance [ 994.928353] env[68285]: DEBUG oslo_vmware.api [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891728, 'name': PowerOffVM_Task} progress is 100%. 
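The terminate path above serializes on the instance UUID and on a per-instance "-events" lock before destroying the VM, and oslo.concurrency records each acquisition and release with waited/held timings (the lockutils.py:405/410/424 lines throughout this log). A minimal sketch of that locking pattern; the lock names and function bodies are examples, not Nova's:

```python
from oslo_concurrency import lockutils

# Decorator form: the whole function runs under the named lock, and
# oslo.concurrency emits the "acquired ... waited" / "released ... held"
# debug lines seen in this log.
@lockutils.synchronized('example-instance-uuid')
def do_terminate_instance():
    # power off, unregister the VM, delete allocations, ...
    pass

# Context-manager form, e.g. for a short "-events" critical section.
def clear_events():
    with lockutils.lock('example-instance-uuid-events'):
        pass  # clear pending external events for the instance

do_terminate_instance()
clear_events()
```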
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.944829] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62783c73-84d3-49c0-a059-6eea756cd67e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.951588] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquiring lock "refresh_cache-8a848ec8-1ae0-4437-be4f-49219214d11f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.951854] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquired lock "refresh_cache-8a848ec8-1ae0-4437-be4f-49219214d11f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 994.952115] env[68285]: DEBUG nova.network.neutron [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 994.956997] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d4a787-3734-402a-9afd-a3ae94e3ede5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.993737] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ac3462-2381-4da9-bd6a-3c7215cfb180 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.003688] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0881688-6059-42a5-8820-c1866238f3ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.022821] env[68285]: DEBUG nova.compute.provider_tree [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.034216] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891729, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.060617] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891730, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.091579] env[68285]: DEBUG nova.compute.manager [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 995.091579] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 995.092345] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f021c184-f08b-49e5-a0bd-3f8197f2ce0c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.103249] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 995.103249] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4516abcf-1e13-43fc-91ea-c7d540396ad5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.109764] env[68285]: DEBUG oslo_vmware.api [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 995.109764] env[68285]: value = "task-2891731" [ 995.109764] env[68285]: _type = "Task" [ 995.109764] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.119753] env[68285]: DEBUG oslo_vmware.api [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891731, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.155435] env[68285]: DEBUG oslo_concurrency.lockutils [req-c3145e49-f170-41b4-8ec7-2d7ef330944f req-e7971a10-f567-471a-b134-09938f0a90e0 service nova] Releasing lock "refresh_cache-7bef3e2a-00ab-480a-aa8c-335635ee5d31" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.188801] env[68285]: DEBUG oslo_vmware.api [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891726, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.210692] env[68285]: DEBUG oslo_concurrency.lockutils [req-ffcaaae4-24c3-4f15-ab5e-1273dcc88f57 req-b167b721-c2e8-4f67-8ff6-4449eedf7167 service nova] Releasing lock "refresh_cache-64103f25-6411-44be-a60f-b9c276dba331" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.326914] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c7fa7806-a4c7-4e2e-a3d1-e744ef3de607 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "c090592b-4bd4-423b-b5b9-68a2c220e388" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 1.528s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.410993] env[68285]: DEBUG nova.compute.manager [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 995.411265] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 995.415173] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361b9c94-ac7b-4824-99ac-bbe09bf0b5af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.423499] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 995.427567] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90d6da26-3dd8-4734-816d-02b951423a8e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.432719] env[68285]: DEBUG oslo_vmware.api [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891728, 'name': PowerOffVM_Task, 'duration_secs': 0.603825} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.432719] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.432831] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.433769] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-899fa120-5de4-40cf-9f31-1c99150cd739 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.438683] env[68285]: DEBUG oslo_vmware.api [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 995.438683] env[68285]: value = "task-2891732" [ 995.438683] env[68285]: _type = "Task" [ 995.438683] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.447872] env[68285]: DEBUG oslo_vmware.api [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891732, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.480408] env[68285]: DEBUG nova.network.neutron [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 995.530069] env[68285]: DEBUG nova.scheduler.client.report [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 995.534337] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 995.538018] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 995.538018] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Deleting the datastore file [datastore1] 7bef3e2a-00ab-480a-aa8c-335635ee5d31 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.538018] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd9a46cb-1eef-4da6-8295-21231b48fb73 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.543604] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891729, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.548816} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.544606] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 64103f25-6411-44be-a60f-b9c276dba331/64103f25-6411-44be-a60f-b9c276dba331.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 995.545128] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 995.545484] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2281d42c-2081-48f1-8000-8cef11dc3671 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.555734] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 995.555734] env[68285]: value = "task-2891735" [ 995.555734] env[68285]: _type = "Task" [ 995.555734] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.556888] env[68285]: DEBUG oslo_vmware.api [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for the task: (returnval){ [ 995.556888] env[68285]: value = "task-2891734" [ 995.556888] env[68285]: _type = "Task" [ 995.556888] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.583134] env[68285]: DEBUG oslo_vmware.api [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891730, 'name': RemoveSnapshot_Task, 'duration_secs': 0.674029} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.584106] env[68285]: DEBUG nova.network.neutron [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.591842] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 995.592112] env[68285]: INFO nova.compute.manager [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Took 13.75 seconds to snapshot the instance on the hypervisor. [ 995.597176] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891735, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.597176] env[68285]: DEBUG oslo_vmware.api [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891734, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.600284] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "f2696c7f-5676-403a-87e0-fb0884866005" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.600524] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "f2696c7f-5676-403a-87e0-fb0884866005" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.621323] env[68285]: DEBUG oslo_vmware.api [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891731, 'name': PowerOffVM_Task, 'duration_secs': 0.372567} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.621605] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.621802] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.622153] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e0c48a5-29c0-4036-80c2-afef36815727 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.688940] env[68285]: DEBUG oslo_vmware.api [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891726, 'name': ReconfigVM_Task, 'duration_secs': 1.341353} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.690314] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Reconfigured VM instance instance-00000017 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 995.695354] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 995.695591] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 995.695776] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Deleting the datastore file [datastore1] 32d23c62-23ec-4732-a95d-6ac32805e1b9 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.696098] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f12c1b35-05d4-41bd-ba9a-da7a7e14a406 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.706767] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07f24475-81b2-49c6-8574-aeefa7bf0c69 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.716822] 
env[68285]: DEBUG oslo_vmware.api [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 995.716822] env[68285]: value = "task-2891737" [ 995.716822] env[68285]: _type = "Task" [ 995.716822] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.718360] env[68285]: DEBUG oslo_vmware.api [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 995.718360] env[68285]: value = "task-2891738" [ 995.718360] env[68285]: _type = "Task" [ 995.718360] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.731457] env[68285]: DEBUG oslo_vmware.api [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891737, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.735322] env[68285]: DEBUG oslo_vmware.api [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891738, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.934690] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "f1b8808d-c3a1-4be6-b6ec-ed441291e8f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.934958] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "f1b8808d-c3a1-4be6-b6ec-ed441291e8f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.935248] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "f1b8808d-c3a1-4be6-b6ec-ed441291e8f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.935473] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "f1b8808d-c3a1-4be6-b6ec-ed441291e8f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.936114] env[68285]: DEBUG 
oslo_concurrency.lockutils [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "f1b8808d-c3a1-4be6-b6ec-ed441291e8f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.938286] env[68285]: INFO nova.compute.manager [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Terminating instance [ 995.950326] env[68285]: DEBUG oslo_vmware.api [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891732, 'name': PowerOffVM_Task, 'duration_secs': 0.264203} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.951026] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.951026] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.951489] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71a351ec-4349-4e31-8caa-a254b54ccc95 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.027758] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 996.028090] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 996.028266] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Deleting the datastore file [datastore1] 9569d50c-d358-4cc5-a106-32da785e4765 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 996.028605] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a008d95c-88eb-4921-85b3-6ea745587209 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.036401] env[68285]: DEBUG oslo_concurrency.lockutils [None 
req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.110s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.038515] env[68285]: DEBUG oslo_vmware.api [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 996.038515] env[68285]: value = "task-2891740" [ 996.038515] env[68285]: _type = "Task" [ 996.038515] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.039053] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.471s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.040455] env[68285]: INFO nova.compute.claims [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.052716] env[68285]: DEBUG oslo_vmware.api [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891740, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.057643] env[68285]: INFO nova.scheduler.client.report [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleted allocations for instance 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5 [ 996.075553] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891735, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079245} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.078820] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 996.079168] env[68285]: DEBUG oslo_vmware.api [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Task: {'id': task-2891734, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216226} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.079864] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c84cb8-5f76-48ae-8982-749e679b5854 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.082423] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 996.082601] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 996.082773] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 996.082943] env[68285]: INFO nova.compute.manager [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Took 1.70 seconds to destroy the instance on the hypervisor. [ 996.083810] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 996.083810] env[68285]: DEBUG nova.compute.manager [-] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 996.083810] env[68285]: DEBUG nova.network.neutron [-] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 996.106664] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] 64103f25-6411-44be-a60f-b9c276dba331/64103f25-6411-44be-a60f-b9c276dba331.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 996.109421] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Releasing lock "refresh_cache-8a848ec8-1ae0-4437-be4f-49219214d11f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.109910] env[68285]: DEBUG nova.compute.manager [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 996.110044] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 996.110373] env[68285]: DEBUG nova.compute.manager [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 996.114060] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1ab8a16-d501-49f3-bc59-712ea30a3d1c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.129971] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf6c293-bb2e-478b-a239-e3154fb503b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.138858] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 996.140840] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-afb4e11f-aca5-44f4-b13e-203484e7f4bf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.142500] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 996.142500] env[68285]: value = "task-2891741" [ 996.142500] env[68285]: _type = "Task" [ 996.142500] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.148181] env[68285]: DEBUG oslo_vmware.api [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 996.148181] env[68285]: value = "task-2891742" [ 996.148181] env[68285]: _type = "Task" [ 996.148181] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.155460] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891741, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.164634] env[68285]: DEBUG oslo_vmware.api [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891742, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.202718] env[68285]: DEBUG nova.compute.manager [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Found 3 images (rotation: 2) {{(pid=68285) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 996.202935] env[68285]: DEBUG nova.compute.manager [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Rotating out 1 backups {{(pid=68285) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 996.203177] env[68285]: DEBUG nova.compute.manager [None req-6680b260-7baa-43bc-a060-51307e106671 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Deleting image 5d4138b6-a36b-4e65-84a4-3ee0131980e6 {{(pid=68285) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 996.231619] env[68285]: DEBUG oslo_vmware.api [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891737, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171843} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.232490] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 996.232894] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 996.232949] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 996.233202] env[68285]: INFO nova.compute.manager [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Took 1.14 seconds to destroy the instance on the hypervisor. [ 996.233417] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 996.237256] env[68285]: DEBUG nova.compute.manager [-] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 996.237361] env[68285]: DEBUG nova.network.neutron [-] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 996.239322] env[68285]: DEBUG oslo_vmware.api [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891738, 'name': ReconfigVM_Task, 'duration_secs': 0.159559} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.239657] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580917', 'volume_id': 'f568d87f-424d-4432-b017-6f7542b87545', 'name': 'volume-f568d87f-424d-4432-b017-6f7542b87545', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5e101d74-7a82-4118-8f4c-7af9a6b0917a', 'attached_at': '', 'detached_at': '', 'volume_id': 'f568d87f-424d-4432-b017-6f7542b87545', 'serial': 'f568d87f-424d-4432-b017-6f7542b87545'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 996.452151] env[68285]: DEBUG nova.compute.manager [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 996.452408] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 996.453322] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b635d32c-ea2d-4bb6-9136-7817793761b0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.464562] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 996.464562] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44f62e71-3fcd-4285-a1ee-3be67e66e9d6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.471938] env[68285]: DEBUG oslo_vmware.api [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 996.471938] env[68285]: value = "task-2891743" [ 996.471938] env[68285]: _type = "Task" [ 996.471938] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.480934] env[68285]: DEBUG oslo_vmware.api [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891743, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.559033] env[68285]: DEBUG oslo_vmware.api [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891740, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213241} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.559033] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 996.559155] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 996.559339] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 996.559514] env[68285]: INFO nova.compute.manager [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Took 1.15 seconds to destroy the instance on the hypervisor. [ 996.559750] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 996.560853] env[68285]: DEBUG nova.compute.manager [-] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 996.560853] env[68285]: DEBUG nova.network.neutron [-] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 996.568407] env[68285]: DEBUG oslo_concurrency.lockutils [None req-731583c8-10a2-4159-8890-851cc05caafd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "7df1a9b4-e363-4e35-a8d5-6b09b671e6a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.007s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.587300] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "1f5fe064-0443-4b7f-911a-45d803836eeb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.587535] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "1f5fe064-0443-4b7f-911a-45d803836eeb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.590289] env[68285]: DEBUG nova.compute.manager [req-aadadc37-2191-41d7-95ec-6713e00990e3 req-c58a417e-9508-4590-82e4-ec37f91474f9 service nova] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Received event network-vif-deleted-d542d712-22ed-45d7-bf6e-ce3ae5cf5556 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 996.590497] env[68285]: INFO nova.compute.manager [req-aadadc37-2191-41d7-95ec-6713e00990e3 req-c58a417e-9508-4590-82e4-ec37f91474f9 service nova] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Neutron deleted interface d542d712-22ed-45d7-bf6e-ce3ae5cf5556; detaching it from the instance and deleting it from the info cache [ 996.590692] env[68285]: DEBUG nova.network.neutron [req-aadadc37-2191-41d7-95ec-6713e00990e3 req-c58a417e-9508-4590-82e4-ec37f91474f9 service nova] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.613405] env[68285]: DEBUG nova.compute.manager [req-a441e6d3-eecd-4245-bbfe-56e8849a1623 req-cb19eec8-1b01-4d3d-8d57-0ad702992faa service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Received event network-vif-deleted-ee14be75-4848-4471-9d06-29e7a06446fd {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 996.613609] env[68285]: INFO nova.compute.manager [req-a441e6d3-eecd-4245-bbfe-56e8849a1623 req-cb19eec8-1b01-4d3d-8d57-0ad702992faa service nova] [instance: 
7bef3e2a-00ab-480a-aa8c-335635ee5d31] Neutron deleted interface ee14be75-4848-4471-9d06-29e7a06446fd; detaching it from the instance and deleting it from the info cache [ 996.613779] env[68285]: DEBUG nova.network.neutron [req-a441e6d3-eecd-4245-bbfe-56e8849a1623 req-cb19eec8-1b01-4d3d-8d57-0ad702992faa service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.657095] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.662916] env[68285]: DEBUG oslo_vmware.api [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891742, 'name': PowerOffVM_Task, 'duration_secs': 0.224394} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.663185] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 996.663382] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 996.663954] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e4bf1a2-2c22-4884-8c4e-5c154e5bd756 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.687759] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 996.687952] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 996.688186] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Deleting the datastore file [datastore1] 8a848ec8-1ae0-4437-be4f-49219214d11f {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 996.688457] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff43cfac-8cce-4dc8-b3c2-f4319d1505f2 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.695924] env[68285]: DEBUG oslo_vmware.api [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for the task: (returnval){ [ 996.695924] env[68285]: value = "task-2891745" [ 996.695924] env[68285]: _type = "Task" [ 996.695924] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.704297] env[68285]: DEBUG oslo_vmware.api [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891745, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.775873] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.790461] env[68285]: DEBUG nova.objects.instance [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lazy-loading 'flavor' on Instance uuid 5e101d74-7a82-4118-8f4c-7af9a6b0917a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.981674] env[68285]: DEBUG oslo_vmware.api [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891743, 'name': PowerOffVM_Task, 'duration_secs': 0.191222} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.981961] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 996.982146] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 996.982395] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d0b8168-11ea-43c1-9cf4-90f43102975b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.038850] env[68285]: DEBUG nova.network.neutron [-] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.052308] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 997.052394] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 997.053157] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleting the datastore file [datastore1] f1b8808d-c3a1-4be6-b6ec-ed441291e8f2 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 997.056063] env[68285]: DEBUG nova.network.neutron [-] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.057133] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-276937c1-1208-4aed-a4e9-2a5023a9124e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.066264] env[68285]: DEBUG oslo_vmware.api [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 997.066264] env[68285]: value = "task-2891747" [ 997.066264] env[68285]: _type = "Task" [ 997.066264] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.075047] env[68285]: DEBUG oslo_vmware.api [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891747, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.095225] env[68285]: DEBUG nova.compute.manager [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 997.099702] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-378bb3d0-fa67-4567-99f8-73092b89f7ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.107665] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2ff85a-5753-41de-800b-0ce0bd45aab5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.122029] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5bdd7660-4f52-4da9-a3ae-6d2a4d1e4a26 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.129823] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d2cda4-424e-4fd5-b7a1-49f3a31e3562 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.154641] env[68285]: DEBUG nova.compute.manager [req-aadadc37-2191-41d7-95ec-6713e00990e3 req-c58a417e-9508-4590-82e4-ec37f91474f9 service nova] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Detach interface failed, port_id=d542d712-22ed-45d7-bf6e-ce3ae5cf5556, reason: Instance 32d23c62-23ec-4732-a95d-6ac32805e1b9 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 997.163723] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891741, 'name': ReconfigVM_Task, 'duration_secs': 0.585466} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.164040] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Reconfigured VM instance instance-0000003c to attach disk [datastore2] 64103f25-6411-44be-a60f-b9c276dba331/64103f25-6411-44be-a60f-b9c276dba331.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 997.175877] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bdd9c170-60c8-429e-8cbb-dce6eee47f07 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.177980] env[68285]: DEBUG nova.compute.manager [req-a441e6d3-eecd-4245-bbfe-56e8849a1623 req-cb19eec8-1b01-4d3d-8d57-0ad702992faa service nova] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Detach interface failed, port_id=ee14be75-4848-4471-9d06-29e7a06446fd, reason: Instance 7bef3e2a-00ab-480a-aa8c-335635ee5d31 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 997.184995] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 997.184995] env[68285]: value = "task-2891748" [ 997.184995] env[68285]: _type = "Task" [ 997.184995] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.195901] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891748, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.210022] env[68285]: DEBUG oslo_vmware.api [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Task: {'id': task-2891745, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173475} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.210501] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 997.210763] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 997.211014] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 997.211244] env[68285]: INFO nova.compute.manager [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Took 1.10 seconds to destroy the instance on the hypervisor. [ 997.211531] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 997.211744] env[68285]: DEBUG nova.compute.manager [-] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 997.211841] env[68285]: DEBUG nova.network.neutron [-] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 997.236948] env[68285]: DEBUG nova.network.neutron [-] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 997.426057] env[68285]: DEBUG nova.network.neutron [-] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.535354] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70181c6e-2ae3-4d2e-8344-03f09ad7e9bb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.542079] env[68285]: INFO nova.compute.manager [-] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Took 1.30 seconds to deallocate network for instance. 
[ 997.544403] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54314a5-c882-4752-8658-46f8089537ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.578522] env[68285]: INFO nova.compute.manager [-] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Took 1.49 seconds to deallocate network for instance. [ 997.582083] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b462e55f-dbd9-4a69-bdc2-caee784f5f47 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.593072] env[68285]: DEBUG oslo_vmware.api [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891747, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157438} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.596045] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 997.596045] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 997.596245] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 997.596452] env[68285]: INFO nova.compute.manager [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 997.596715] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 997.597168] env[68285]: DEBUG nova.compute.manager [-] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 997.597168] env[68285]: DEBUG nova.network.neutron [-] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 997.600033] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec87c4a-6f6d-4da5-ac3c-19794e4bc1f3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.618271] env[68285]: DEBUG nova.compute.provider_tree [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.621301] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.695856] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891748, 'name': Rename_Task, 'duration_secs': 0.14582} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.696159] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 997.696398] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9605abf0-0868-4435-a21e-558297986f8b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.706017] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 997.706017] env[68285]: value = "task-2891749" [ 997.706017] env[68285]: _type = "Task" [ 997.706017] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.710870] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891749, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.738902] env[68285]: DEBUG nova.network.neutron [-] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.776840] env[68285]: DEBUG oslo_concurrency.lockutils [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "1b9dd0e2-781f-43d7-a66e-e718a0972c78" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.778490] env[68285]: DEBUG oslo_concurrency.lockutils [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "1b9dd0e2-781f-43d7-a66e-e718a0972c78" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.778490] env[68285]: DEBUG oslo_concurrency.lockutils [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "1b9dd0e2-781f-43d7-a66e-e718a0972c78-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.778490] env[68285]: DEBUG oslo_concurrency.lockutils [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "1b9dd0e2-781f-43d7-a66e-e718a0972c78-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.778490] env[68285]: DEBUG oslo_concurrency.lockutils [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "1b9dd0e2-781f-43d7-a66e-e718a0972c78-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.780212] env[68285]: INFO nova.compute.manager [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Terminating instance [ 997.797516] env[68285]: DEBUG oslo_concurrency.lockutils [None req-687fb527-5f79-406b-930b-b14c8b75356e tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.277s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.826796] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0eb6b58a-bf9b-458d-a2b8-69479cc75aa7 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] 
Acquiring lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.827082] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0eb6b58a-bf9b-458d-a2b8-69479cc75aa7 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.827351] env[68285]: DEBUG nova.compute.manager [None req-0eb6b58a-bf9b-458d-a2b8-69479cc75aa7 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 997.828843] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9527e305-93cc-4904-b5b5-7cc894ebb5ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.836336] env[68285]: DEBUG nova.compute.manager [None req-0eb6b58a-bf9b-458d-a2b8-69479cc75aa7 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68285) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 997.837011] env[68285]: DEBUG nova.objects.instance [None req-0eb6b58a-bf9b-458d-a2b8-69479cc75aa7 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lazy-loading 'flavor' on Instance uuid 5e101d74-7a82-4118-8f4c-7af9a6b0917a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.929134] env[68285]: INFO nova.compute.manager [-] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Took 1.37 seconds to deallocate network for instance. 
[ 998.053689] env[68285]: DEBUG oslo_concurrency.lockutils [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.089608] env[68285]: DEBUG oslo_concurrency.lockutils [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.121414] env[68285]: DEBUG nova.scheduler.client.report [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 998.213044] env[68285]: DEBUG oslo_vmware.api [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891749, 'name': PowerOnVM_Task, 'duration_secs': 0.478791} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.213985] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 998.214199] env[68285]: INFO nova.compute.manager [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Took 7.65 seconds to spawn the instance on the hypervisor. 
[ 998.214394] env[68285]: DEBUG nova.compute.manager [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 998.215171] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4be693f-4618-4a7d-aed5-926b502b105f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.224842] env[68285]: DEBUG oslo_concurrency.lockutils [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.224842] env[68285]: DEBUG oslo_concurrency.lockutils [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.224979] env[68285]: DEBUG oslo_concurrency.lockutils [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.225168] env[68285]: DEBUG oslo_concurrency.lockutils [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.225338] env[68285]: DEBUG oslo_concurrency.lockutils [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.227499] env[68285]: INFO nova.compute.manager [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Terminating instance [ 998.241144] env[68285]: INFO nova.compute.manager [-] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Took 1.03 seconds to deallocate network for instance. 
[ 998.285098] env[68285]: DEBUG nova.compute.manager [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 998.285098] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 998.286267] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499c072e-bbf3-4830-9b9b-9bd577ada13f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.296156] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 998.296227] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a1421af-937a-4fce-bb09-3f1a7f3d8915 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.302075] env[68285]: DEBUG oslo_vmware.api [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 998.302075] env[68285]: value = "task-2891750" [ 998.302075] env[68285]: _type = "Task" [ 998.302075] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.310061] env[68285]: DEBUG oslo_vmware.api [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891750, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.435892] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.461395] env[68285]: DEBUG nova.network.neutron [-] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.626854] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.627440] env[68285]: DEBUG nova.compute.manager [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 998.630067] env[68285]: DEBUG oslo_concurrency.lockutils [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.002s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.630342] env[68285]: DEBUG nova.objects.instance [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lazy-loading 'resources' on Instance uuid e3b01f87-6a4c-4127-9204-2bfa5ff28f38 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.703113] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d93451d-c13a-4f7c-a432-1db3b760493c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "34aeba05-804e-444c-8e58-69c7721b10b1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.703164] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d93451d-c13a-4f7c-a432-1db3b760493c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "34aeba05-804e-444c-8e58-69c7721b10b1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.703347] env[68285]: DEBUG nova.compute.manager [None req-9d93451d-c13a-4f7c-a432-1db3b760493c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Checking state {{(pid=68285) _get_power_state 
/opt/stack/nova/nova/compute/manager.py:1798}} [ 998.704227] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55223928-73ce-4188-9d87-050b1a1aede7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.711104] env[68285]: DEBUG nova.compute.manager [None req-9d93451d-c13a-4f7c-a432-1db3b760493c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68285) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 998.711652] env[68285]: DEBUG nova.objects.instance [None req-9d93451d-c13a-4f7c-a432-1db3b760493c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'flavor' on Instance uuid 34aeba05-804e-444c-8e58-69c7721b10b1 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.736429] env[68285]: DEBUG nova.compute.manager [req-3144d27b-1a14-4d77-87bc-7b21c772d61e req-fe040a7e-0a82-45bf-aadf-5851f6beb80f service nova] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Received event network-vif-deleted-08074d01-1b01-4ca8-a5ca-f427c8ec414e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 998.736644] env[68285]: DEBUG nova.compute.manager [req-3144d27b-1a14-4d77-87bc-7b21c772d61e req-fe040a7e-0a82-45bf-aadf-5851f6beb80f service nova] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Received event network-vif-deleted-0b7efc8c-8a7b-4401-86cd-f76e8836c2c4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 998.738830] env[68285]: DEBUG nova.compute.manager [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 998.739085] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 998.739906] env[68285]: INFO nova.compute.manager [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Took 54.70 seconds to build instance. 
[ 998.741318] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250f1d97-6c41-41e1-9888-c2bd142de5fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.746815] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.749439] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 998.749667] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93d4ed49-20d0-4f8b-9676-6c0a0946906d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.757941] env[68285]: DEBUG oslo_vmware.api [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 998.757941] env[68285]: value = "task-2891751" [ 998.757941] env[68285]: _type = "Task" [ 998.757941] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.765907] env[68285]: DEBUG oslo_vmware.api [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891751, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.812488] env[68285]: DEBUG oslo_vmware.api [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891750, 'name': PowerOffVM_Task, 'duration_secs': 0.269544} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.812488] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 998.812488] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 998.812488] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c2cc22c-eb16-4103-b30c-3130ce405711 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.844515] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0eb6b58a-bf9b-458d-a2b8-69479cc75aa7 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 998.844618] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-068df66d-99a4-44c9-bdb1-d599dcf275ae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.851028] env[68285]: DEBUG oslo_vmware.api [None req-0eb6b58a-bf9b-458d-a2b8-69479cc75aa7 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 998.851028] env[68285]: value = "task-2891753" [ 998.851028] env[68285]: _type = "Task" [ 998.851028] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.859290] env[68285]: DEBUG oslo_vmware.api [None req-0eb6b58a-bf9b-458d-a2b8-69479cc75aa7 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891753, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.875055] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 998.875055] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 998.875055] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleting the datastore file [datastore2] 1b9dd0e2-781f-43d7-a66e-e718a0972c78 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.875055] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-600a11f6-3de8-492a-be81-ebeb9623245d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.882657] env[68285]: DEBUG oslo_vmware.api [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 998.882657] env[68285]: value = "task-2891754" [ 998.882657] env[68285]: _type = "Task" [ 998.882657] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.891603] env[68285]: DEBUG oslo_vmware.api [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891754, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.967429] env[68285]: INFO nova.compute.manager [-] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Took 1.37 seconds to deallocate network for instance. [ 999.134438] env[68285]: DEBUG nova.compute.utils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 999.139542] env[68285]: DEBUG nova.compute.manager [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 999.139729] env[68285]: DEBUG nova.network.neutron [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 999.218737] env[68285]: DEBUG nova.policy [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fd0582abf8e4fff8e6f8316ba430988', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07b5865cc5804d8d98073e5d0c1449aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 999.246875] env[68285]: DEBUG oslo_concurrency.lockutils [None req-683ae402-054b-4175-86c5-3fc7118ec828 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "64103f25-6411-44be-a60f-b9c276dba331" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.487s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.274212] env[68285]: DEBUG oslo_vmware.api [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891751, 'name': PowerOffVM_Task, 'duration_secs': 0.224133} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.274582] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 999.274872] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 999.275378] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d03bd6f4-3a0f-49c8-b97b-0389f3c1f6ec {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.344017] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 999.344258] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 999.344464] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Deleting the datastore file [datastore2] 52fbfbe4-1807-4d6d-9139-ebe30e6bf647 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 999.347169] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-52ee83a8-3aab-4e25-b031-41c86407fff8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.356496] env[68285]: DEBUG oslo_vmware.api [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for the task: (returnval){ [ 999.356496] env[68285]: value = "task-2891756" [ 999.356496] env[68285]: _type = "Task" [ 999.356496] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.365251] env[68285]: DEBUG oslo_vmware.api [None req-0eb6b58a-bf9b-458d-a2b8-69479cc75aa7 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891753, 'name': PowerOffVM_Task, 'duration_secs': 0.232024} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.365985] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0eb6b58a-bf9b-458d-a2b8-69479cc75aa7 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 999.366206] env[68285]: DEBUG nova.compute.manager [None req-0eb6b58a-bf9b-458d-a2b8-69479cc75aa7 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 999.366976] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86bf00b0-7a2d-4ae1-9f0b-99a4a30d9529 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.372351] env[68285]: DEBUG oslo_vmware.api [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891756, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.392512] env[68285]: DEBUG oslo_vmware.api [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891754, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.40174} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.392710] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 999.392890] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 999.393078] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 999.393251] env[68285]: INFO nova.compute.manager [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Took 1.11 seconds to destroy the instance on the hypervisor. [ 999.393482] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 999.393667] env[68285]: DEBUG nova.compute.manager [-] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 999.393761] env[68285]: DEBUG nova.network.neutron [-] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 999.473471] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.634237] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c854058-6987-471a-8416-d9bc79a92dd7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.639161] env[68285]: DEBUG nova.network.neutron [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Successfully created port: 4ab469f4-9da1-4748-ab22-7f86098988de {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 999.640303] env[68285]: DEBUG nova.compute.manager [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 999.647089] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96064edc-97db-481f-b1f5-19b985b835d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.680780] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f504dd8d-96e5-487c-ab41-4bd60d861079 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.694767] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9117f4a-b0eb-4157-9800-4719bf196eeb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.710807] env[68285]: DEBUG nova.compute.provider_tree [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 999.726157] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d93451d-c13a-4f7c-a432-1db3b760493c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.726429] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bbe78a7d-d14a-434e-9886-9368f08ca8d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.733527] env[68285]: DEBUG oslo_vmware.api [None req-9d93451d-c13a-4f7c-a432-1db3b760493c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 999.733527] env[68285]: value = "task-2891757" [ 999.733527] env[68285]: _type = "Task" [ 999.733527] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.742836] env[68285]: DEBUG oslo_vmware.api [None req-9d93451d-c13a-4f7c-a432-1db3b760493c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891757, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.854362] env[68285]: DEBUG nova.compute.manager [req-dc3376c3-f821-468e-810a-9a6c27d38734 req-459265b5-b534-4006-8d6d-a8e3fb29c446 service nova] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Received event network-vif-deleted-25885f8e-e0d5-491d-a099-409ae53d20c1 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 999.854534] env[68285]: INFO nova.compute.manager [req-dc3376c3-f821-468e-810a-9a6c27d38734 req-459265b5-b534-4006-8d6d-a8e3fb29c446 service nova] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Neutron deleted interface 25885f8e-e0d5-491d-a099-409ae53d20c1; detaching it from the instance and deleting it from the info cache [ 999.855119] env[68285]: DEBUG nova.network.neutron [req-dc3376c3-f821-468e-810a-9a6c27d38734 req-459265b5-b534-4006-8d6d-a8e3fb29c446 service nova] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.867659] env[68285]: DEBUG oslo_vmware.api [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Task: {'id': task-2891756, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226318} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.867912] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 999.868194] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 999.868378] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 999.868548] env[68285]: INFO nova.compute.manager [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Took 1.13 seconds to destroy the instance on the hypervisor. [ 999.868781] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 999.869236] env[68285]: DEBUG nova.compute.manager [-] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 999.869335] env[68285]: DEBUG nova.network.neutron [-] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 999.883603] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0eb6b58a-bf9b-458d-a2b8-69479cc75aa7 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.056s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.194434] env[68285]: DEBUG nova.network.neutron [-] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.244638] env[68285]: ERROR nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [req-8b93ff2d-7934-4e65-87cc-d087d34ae36d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8b93ff2d-7934-4e65-87cc-d087d34ae36d"}]} [ 1000.255623] env[68285]: DEBUG oslo_vmware.api [None req-9d93451d-c13a-4f7c-a432-1db3b760493c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891757, 'name': PowerOffVM_Task, 'duration_secs': 0.25865} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.256044] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d93451d-c13a-4f7c-a432-1db3b760493c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.256309] env[68285]: DEBUG nova.compute.manager [None req-9d93451d-c13a-4f7c-a432-1db3b760493c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1000.257215] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ee7dcc-7671-4bd9-89dd-d11afd347347 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.267703] env[68285]: DEBUG nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1000.288778] env[68285]: DEBUG nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1000.288778] env[68285]: DEBUG nova.compute.provider_tree [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1000.302389] env[68285]: DEBUG nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1000.324689] env[68285]: DEBUG nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 
tempest-SecurityGroupsTestJSON-538611863-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1000.330019] env[68285]: DEBUG nova.compute.manager [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1000.330019] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e355c60-282a-42de-ab89-6925cf522a20 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.359639] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-74f61aab-d0ac-4db6-8cc9-ac8c31f084bf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.370278] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60eba1a1-8a15-499d-884c-f932f2be854d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.382515] env[68285]: DEBUG nova.network.neutron [-] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.407396] env[68285]: DEBUG nova.compute.manager [req-dc3376c3-f821-468e-810a-9a6c27d38734 req-459265b5-b534-4006-8d6d-a8e3fb29c446 service nova] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Detach interface failed, port_id=25885f8e-e0d5-491d-a099-409ae53d20c1, reason: Instance 1b9dd0e2-781f-43d7-a66e-e718a0972c78 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1000.653635] env[68285]: DEBUG nova.compute.manager [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1000.679097] env[68285]: DEBUG nova.virt.hardware [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1000.679359] env[68285]: DEBUG nova.virt.hardware [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1000.679516] env[68285]: DEBUG nova.virt.hardware [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1000.679696] env[68285]: DEBUG nova.virt.hardware [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1000.679840] env[68285]: DEBUG nova.virt.hardware [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1000.679982] env[68285]: DEBUG nova.virt.hardware [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1000.680207] env[68285]: DEBUG nova.virt.hardware [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1000.680384] env[68285]: DEBUG nova.virt.hardware [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1000.680559] env[68285]: DEBUG nova.virt.hardware [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 
tempest-ImagesTestJSON-1472763889-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1000.680718] env[68285]: DEBUG nova.virt.hardware [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1000.680889] env[68285]: DEBUG nova.virt.hardware [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1000.681736] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae572823-e945-4d5e-bca5-88ed846ce46f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.692436] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c79c62a-e2d7-4f8e-b4ce-47cb7f41d051 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.698868] env[68285]: INFO nova.compute.manager [-] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Took 1.31 seconds to deallocate network for instance. [ 1000.752325] env[68285]: DEBUG nova.objects.instance [None req-dcb77a32-c959-4987-8cc1-0f50dbb18bc5 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lazy-loading 'flavor' on Instance uuid 5e101d74-7a82-4118-8f4c-7af9a6b0917a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.773989] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9d93451d-c13a-4f7c-a432-1db3b760493c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "34aeba05-804e-444c-8e58-69c7721b10b1" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.071s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.807670] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d530ec3-6924-49c6-b100-87f858bbcde6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.815809] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49dca4f7-faeb-4f39-a609-5d1a522b1b1d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.848596] env[68285]: INFO nova.compute.manager [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] instance snapshotting [ 1000.851915] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10cd920-c067-4d0c-9520-0ab2e42df76b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.856019] env[68285]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9f6b43-f095-4a1d-b678-170ebb188b43 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.879254] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d40c992-1b7b-4722-8613-ce5b009de933 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.883389] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07fb34e-316b-4e2a-bf58-844631321674 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.887405] env[68285]: INFO nova.compute.manager [-] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Took 1.02 seconds to deallocate network for instance. [ 1000.913549] env[68285]: DEBUG nova.compute.provider_tree [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1001.206315] env[68285]: DEBUG oslo_concurrency.lockutils [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.256995] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dcb77a32-c959-4987-8cc1-0f50dbb18bc5 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.257188] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dcb77a32-c959-4987-8cc1-0f50dbb18bc5 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquired lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.257370] env[68285]: DEBUG nova.network.neutron [None req-dcb77a32-c959-4987-8cc1-0f50dbb18bc5 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1001.257557] env[68285]: DEBUG nova.objects.instance [None req-dcb77a32-c959-4987-8cc1-0f50dbb18bc5 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lazy-loading 'info_cache' on Instance uuid 5e101d74-7a82-4118-8f4c-7af9a6b0917a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.420022] 
env[68285]: DEBUG oslo_concurrency.lockutils [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.420022] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1001.420022] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7dc7ec1b-0c5a-4c6b-bb4c-91fd2d816bde {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.429662] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1001.429662] env[68285]: value = "task-2891758" [ 1001.429662] env[68285]: _type = "Task" [ 1001.429662] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.445341] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891758, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.456525] env[68285]: ERROR nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [req-f80949ff-b42d-4919-84e8-dbdf7ddbb45d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n update conflict: Another thread concurrently updated the resource provider data. 
Please retry your update ", "code": "placement.concurrent_update", "request_id": "req-f80949ff-b42d-4919-84e8-dbdf7ddbb45d"}]} [ 1001.476285] env[68285]: DEBUG nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1001.493091] env[68285]: DEBUG nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1001.493402] env[68285]: DEBUG nova.compute.provider_tree [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1001.506536] env[68285]: DEBUG nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1001.529936] env[68285]: DEBUG nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1001.760734] env[68285]: DEBUG nova.objects.base [None req-dcb77a32-c959-4987-8cc1-0f50dbb18bc5 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Object Instance<5e101d74-7a82-4118-8f4c-7af9a6b0917a> lazy-loaded attributes: flavor,info_cache {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1001.795866] env[68285]: DEBUG nova.network.neutron [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] 
Successfully updated port: 4ab469f4-9da1-4748-ab22-7f86098988de {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1001.942219] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891758, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.964584] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cc5d16-1a56-41f2-a94e-2c638d21b262 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.973472] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccac8254-ec39-4978-8859-8f761a56b57a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.014536] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31041833-6444-4d7b-b841-460f9e3d9c48 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.022331] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a49fca4-7958-4725-8841-180d11c524e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.028702] env[68285]: DEBUG nova.compute.manager [req-905b4aa1-2faf-490a-b6e8-e4935b79f130 req-b43f755c-24bb-4b4f-b238-c9f5e420887c service nova] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Received event network-vif-plugged-4ab469f4-9da1-4748-ab22-7f86098988de {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1002.028917] env[68285]: DEBUG oslo_concurrency.lockutils [req-905b4aa1-2faf-490a-b6e8-e4935b79f130 req-b43f755c-24bb-4b4f-b238-c9f5e420887c service nova] Acquiring lock "3c71f649-b456-45a0-a113-725a529702a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.029461] env[68285]: DEBUG oslo_concurrency.lockutils [req-905b4aa1-2faf-490a-b6e8-e4935b79f130 req-b43f755c-24bb-4b4f-b238-c9f5e420887c service nova] Lock "3c71f649-b456-45a0-a113-725a529702a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.029461] env[68285]: DEBUG oslo_concurrency.lockutils [req-905b4aa1-2faf-490a-b6e8-e4935b79f130 req-b43f755c-24bb-4b4f-b238-c9f5e420887c service nova] Lock "3c71f649-b456-45a0-a113-725a529702a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.029560] env[68285]: DEBUG nova.compute.manager [req-905b4aa1-2faf-490a-b6e8-e4935b79f130 req-b43f755c-24bb-4b4f-b238-c9f5e420887c service nova] [instance: 3c71f649-b456-45a0-a113-725a529702a2] No waiting events found dispatching network-vif-plugged-4ab469f4-9da1-4748-ab22-7f86098988de {{(pid=68285) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1002.029726] env[68285]: WARNING nova.compute.manager [req-905b4aa1-2faf-490a-b6e8-e4935b79f130 req-b43f755c-24bb-4b4f-b238-c9f5e420887c service nova] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Received unexpected event network-vif-plugged-4ab469f4-9da1-4748-ab22-7f86098988de for instance with vm_state building and task_state spawning. [ 1002.040580] env[68285]: DEBUG nova.compute.provider_tree [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1002.156346] env[68285]: DEBUG nova.compute.manager [req-3fd336cc-7da4-4e25-a713-427453bcc978 req-f2547bd2-78a0-4ec0-8015-552c2b401dd2 service nova] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Received event network-vif-deleted-ae1e3da0-addf-4feb-83f8-8a52e6a74a39 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1002.298382] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "refresh_cache-3c71f649-b456-45a0-a113-725a529702a2" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.298382] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "refresh_cache-3c71f649-b456-45a0-a113-725a529702a2" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.298382] env[68285]: DEBUG nova.network.neutron [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1002.440729] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891758, 'name': CreateSnapshot_Task, 'duration_secs': 0.690893} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.441042] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1002.441743] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb0d58d-2f07-4852-bdfe-5d8c0fc6cac9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.489168] env[68285]: DEBUG nova.compute.manager [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Stashing vm_state: stopped {{(pid=68285) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1002.504099] env[68285]: DEBUG nova.network.neutron [None req-dcb77a32-c959-4987-8cc1-0f50dbb18bc5 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Updating instance_info_cache with network_info: [{"id": "462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1", "address": "fa:16:3e:c7:6a:21", "network": {"id": "43282131-363f-42f6-b208-74cfe0d8a7c2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-166704782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fd7bc7649b647939584cc01c1f3b5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap462b7f0c-cb", "ovs_interfaceid": "462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.563209] env[68285]: ERROR nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] [req-c0338864-42c5-4407-987d-999118045485] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c0338864-42c5-4407-987d-999118045485"}]} [ 1002.580546] env[68285]: DEBUG nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1002.595977] env[68285]: DEBUG nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1002.596272] env[68285]: DEBUG nova.compute.provider_tree [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1002.609352] env[68285]: DEBUG nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1002.628853] env[68285]: DEBUG nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1002.843237] env[68285]: DEBUG nova.network.neutron [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1002.961920] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1002.962371] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0499c2f3-5e9a-403d-a5f3-cbb3b71587a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.973910] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1002.973910] env[68285]: value = "task-2891759" [ 1002.973910] env[68285]: _type = "Task" [ 1002.973910] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.981953] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891759, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.006324] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dcb77a32-c959-4987-8cc1-0f50dbb18bc5 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Releasing lock "refresh_cache-5e101d74-7a82-4118-8f4c-7af9a6b0917a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.011344] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.091496] env[68285]: DEBUG nova.network.neutron [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Updating instance_info_cache with network_info: [{"id": "4ab469f4-9da1-4748-ab22-7f86098988de", "address": "fa:16:3e:7d:3d:d4", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": 
"nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ab469f4-9d", "ovs_interfaceid": "4ab469f4-9da1-4748-ab22-7f86098988de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.119677] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ccf040-7bbc-415c-b05a-e386c108c118 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.128443] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca58d4f4-c9e0-444f-b093-4e7f7a380afb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.162923] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e3389f-be03-4edd-a4d9-9daa39cfca39 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.170765] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a847d7-d0dc-4ca3-bbe9-e2768a24c592 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.184956] env[68285]: DEBUG nova.compute.provider_tree [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1003.486091] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891759, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.594566] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "refresh_cache-3c71f649-b456-45a0-a113-725a529702a2" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.595026] env[68285]: DEBUG nova.compute.manager [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Instance network_info: |[{"id": "4ab469f4-9da1-4748-ab22-7f86098988de", "address": "fa:16:3e:7d:3d:d4", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ab469f4-9d", "ovs_interfaceid": "4ab469f4-9da1-4748-ab22-7f86098988de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1003.595557] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:3d:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '209639b9-c313-4b35-86dc-dccd744d174a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ab469f4-9da1-4748-ab22-7f86098988de', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1003.603851] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1003.604163] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1003.604414] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82a3430a-a9e0-4b0a-8c7e-66a9520dd49d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.625601] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1003.625601] env[68285]: value = "task-2891760" [ 1003.625601] env[68285]: _type = "Task" [ 1003.625601] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.633501] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891760, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.725528] env[68285]: DEBUG nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 87 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1003.725938] env[68285]: DEBUG nova.compute.provider_tree [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 87 to 88 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1003.726278] env[68285]: DEBUG nova.compute.provider_tree [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1003.986941] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891759, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.011912] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcb77a32-c959-4987-8cc1-0f50dbb18bc5 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1004.012421] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f10db9e-e8a7-4027-b8eb-5cd7f9e95320 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.018741] env[68285]: DEBUG oslo_vmware.api [None req-dcb77a32-c959-4987-8cc1-0f50dbb18bc5 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1004.018741] env[68285]: value = "task-2891761" [ 1004.018741] env[68285]: _type = "Task" [ 1004.018741] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.026856] env[68285]: DEBUG oslo_vmware.api [None req-dcb77a32-c959-4987-8cc1-0f50dbb18bc5 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891761, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.050379] env[68285]: DEBUG nova.compute.manager [req-9b4f5b26-0678-4a91-911d-5102992ca2dd req-75136807-c9b0-492f-a46d-7d90c8099935 service nova] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Received event network-changed-4ab469f4-9da1-4748-ab22-7f86098988de {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1004.050559] env[68285]: DEBUG nova.compute.manager [req-9b4f5b26-0678-4a91-911d-5102992ca2dd req-75136807-c9b0-492f-a46d-7d90c8099935 service nova] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Refreshing instance network info cache due to event network-changed-4ab469f4-9da1-4748-ab22-7f86098988de. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1004.050801] env[68285]: DEBUG oslo_concurrency.lockutils [req-9b4f5b26-0678-4a91-911d-5102992ca2dd req-75136807-c9b0-492f-a46d-7d90c8099935 service nova] Acquiring lock "refresh_cache-3c71f649-b456-45a0-a113-725a529702a2" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.050926] env[68285]: DEBUG oslo_concurrency.lockutils [req-9b4f5b26-0678-4a91-911d-5102992ca2dd req-75136807-c9b0-492f-a46d-7d90c8099935 service nova] Acquired lock "refresh_cache-3c71f649-b456-45a0-a113-725a529702a2" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.051098] env[68285]: DEBUG nova.network.neutron [req-9b4f5b26-0678-4a91-911d-5102992ca2dd req-75136807-c9b0-492f-a46d-7d90c8099935 service nova] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Refreshing network info cache for port 4ab469f4-9da1-4748-ab22-7f86098988de {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1004.134975] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891760, 'name': CreateVM_Task, 'duration_secs': 0.450314} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.135181] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1004.135934] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.136152] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.136482] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1004.136831] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3c7ff53-c21a-4cb5-b7a7-d139c9778a85 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.141375] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1004.141375] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52a804f4-873b-1661-18d1-13e20d30ee34" [ 1004.141375] env[68285]: _type = "Task" [ 1004.141375] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.149768] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a804f4-873b-1661-18d1-13e20d30ee34, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.232824] env[68285]: DEBUG oslo_concurrency.lockutils [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 5.602s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.234770] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.657s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.236291] env[68285]: INFO nova.compute.claims [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1004.256383] env[68285]: INFO nova.scheduler.client.report [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Deleted allocations for instance e3b01f87-6a4c-4127-9204-2bfa5ff28f38 [ 1004.486330] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891759, 'name': CloneVM_Task, 'duration_secs': 1.201006} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.486330] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Created linked-clone VM from snapshot [ 1004.486791] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fc7a1d-deff-4726-8020-92ce303f6c5a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.495367] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Uploading image 50e98f63-add8-4c68-a345-ebd51704c9a8 {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1004.507519] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1004.507786] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ae02770c-e5f9-45cc-b5c5-2192b4818803 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.513413] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1004.513413] env[68285]: value = "task-2891762" [ 1004.513413] env[68285]: _type = "Task" [ 1004.513413] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.523229] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891762, 'name': Destroy_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.530962] env[68285]: DEBUG oslo_vmware.api [None req-dcb77a32-c959-4987-8cc1-0f50dbb18bc5 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891761, 'name': PowerOnVM_Task, 'duration_secs': 0.41209} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.531257] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcb77a32-c959-4987-8cc1-0f50dbb18bc5 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1004.531457] env[68285]: DEBUG nova.compute.manager [None req-dcb77a32-c959-4987-8cc1-0f50dbb18bc5 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1004.532210] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45cbffb3-5834-4ccc-a1d4-e117864dfea6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.651112] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a804f4-873b-1661-18d1-13e20d30ee34, 'name': SearchDatastore_Task, 'duration_secs': 0.012583} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.654174] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.654426] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1004.654663] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.654804] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.654981] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1004.655263] env[68285]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-64a1cf7f-8b64-4ce1-a73a-5d8dc3d10141 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.663846] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1004.664045] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1004.664773] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a993be0-bfd2-4e7b-911f-22e997a86a05 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.669939] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1004.669939] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5258c2f2-7376-e86e-631e-6a805562ca2d" [ 1004.669939] env[68285]: _type = "Task" [ 1004.669939] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.679087] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5258c2f2-7376-e86e-631e-6a805562ca2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.763554] env[68285]: DEBUG oslo_concurrency.lockutils [None req-24136ce4-9b5b-480a-b396-66a56ec11d72 tempest-SecurityGroupsTestJSON-538611863 tempest-SecurityGroupsTestJSON-538611863-project-member] Lock "e3b01f87-6a4c-4127-9204-2bfa5ff28f38" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.754s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.887032] env[68285]: DEBUG nova.network.neutron [req-9b4f5b26-0678-4a91-911d-5102992ca2dd req-75136807-c9b0-492f-a46d-7d90c8099935 service nova] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Updated VIF entry in instance network info cache for port 4ab469f4-9da1-4748-ab22-7f86098988de. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1004.887410] env[68285]: DEBUG nova.network.neutron [req-9b4f5b26-0678-4a91-911d-5102992ca2dd req-75136807-c9b0-492f-a46d-7d90c8099935 service nova] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Updating instance_info_cache with network_info: [{"id": "4ab469f4-9da1-4748-ab22-7f86098988de", "address": "fa:16:3e:7d:3d:d4", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ab469f4-9d", "ovs_interfaceid": "4ab469f4-9da1-4748-ab22-7f86098988de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.028792] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891762, 'name': Destroy_Task, 'duration_secs': 0.421913} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.029154] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Destroyed the VM [ 1005.029384] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1005.029622] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-325adb28-856a-4698-be58-5ff33d28d74d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.036644] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1005.036644] env[68285]: value = "task-2891763" [ 1005.036644] env[68285]: _type = "Task" [ 1005.036644] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.049262] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891763, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.180276] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5258c2f2-7376-e86e-631e-6a805562ca2d, 'name': SearchDatastore_Task, 'duration_secs': 0.008779} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.181130] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bc5db02-7f94-4c45-8487-475e39e0b537 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.186936] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1005.186936] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e9f935-8454-cdf9-5af7-3cf3fa7ae7d7" [ 1005.186936] env[68285]: _type = "Task" [ 1005.186936] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.196965] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e9f935-8454-cdf9-5af7-3cf3fa7ae7d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.390204] env[68285]: DEBUG oslo_concurrency.lockutils [req-9b4f5b26-0678-4a91-911d-5102992ca2dd req-75136807-c9b0-492f-a46d-7d90c8099935 service nova] Releasing lock "refresh_cache-3c71f649-b456-45a0-a113-725a529702a2" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.547964] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891763, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.703787] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e9f935-8454-cdf9-5af7-3cf3fa7ae7d7, 'name': SearchDatastore_Task, 'duration_secs': 0.013475} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.704458] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.704458] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 3c71f649-b456-45a0-a113-725a529702a2/3c71f649-b456-45a0-a113-725a529702a2.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1005.704603] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-116d7312-66ea-49d7-84f2-73e71759212d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.713559] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1005.713559] env[68285]: value = "task-2891764" [ 1005.713559] env[68285]: _type = "Task" [ 1005.713559] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.725587] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891764, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.741696] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad1f4ed-0b42-4a73-bd1a-5d5f1ef533fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.748454] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8833a6bd-9487-449c-a635-98343bbff648 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.780251] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ea8b60-b68a-4231-86b8-442a790cf490 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.787344] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859dc0f1-61cd-49c0-9fa0-db6a937c98a2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.800774] env[68285]: DEBUG nova.compute.provider_tree [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.047963] env[68285]: DEBUG oslo_vmware.api [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891763, 'name': RemoveSnapshot_Task, 'duration_secs': 0.609941} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.048357] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1006.226580] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891764, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491825} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.227224] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 3c71f649-b456-45a0-a113-725a529702a2/3c71f649-b456-45a0-a113-725a529702a2.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1006.227454] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1006.227881] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aad162a3-8c70-47bf-8be8-916ee6d4fc1d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.234736] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1006.234736] env[68285]: value = "task-2891765" [ 1006.234736] env[68285]: _type = "Task" [ 1006.234736] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.244010] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891765, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.305157] env[68285]: DEBUG nova.scheduler.client.report [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1006.552986] env[68285]: WARNING nova.compute.manager [None req-1cdb2347-ebca-479e-ad46-70bb372a7929 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Image not found during snapshot: nova.exception.ImageNotFound: Image 50e98f63-add8-4c68-a345-ebd51704c9a8 could not be found. [ 1006.747746] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891765, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072639} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.748340] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1006.749510] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075b485e-e7a7-430d-8a8b-6493c90bfe4e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.775663] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] 3c71f649-b456-45a0-a113-725a529702a2/3c71f649-b456-45a0-a113-725a529702a2.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1006.776150] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b534a28a-0c20-4fb9-9dad-ff7af5a2122d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.798066] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1006.798066] env[68285]: value = "task-2891766" [ 1006.798066] env[68285]: _type = "Task" [ 1006.798066] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.806165] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891766, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.810079] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.575s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.810573] env[68285]: DEBUG nova.compute.manager [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1006.813518] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 49.457s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.813518] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.813711] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68285) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1006.814195] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.890s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.814452] env[68285]: DEBUG nova.objects.instance [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lazy-loading 'resources' on Instance uuid 8b473550-4a40-48a5-9e1c-7c48df828e61 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1006.816785] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660b6b3a-da66-4a32-80b4-e10af94930e0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.826329] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e70678-4378-416d-9118-66b20042596e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.842772] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ed490d-3941-4d28-93c8-faabf10bffaf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.849441] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d37b45-9cb6-446b-a7a0-97fca872bf69 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.879403] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178410MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=68285) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1006.879557] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.310515] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891766, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.320127] env[68285]: DEBUG nova.compute.utils [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1007.321823] env[68285]: DEBUG nova.compute.manager [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Not allocating networking since 'none' was specified. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1007.736758] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d788142-a5be-4e84-83ed-6688b1f51b7e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.745017] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c9e880-ef40-4721-9355-6756f0964ee5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.775764] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a17676-005f-4380-91bd-625a8e0fdc6b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.783731] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9aec35-fa96-4ac3-9b16-2e870a0a66e6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.797758] env[68285]: DEBUG nova.compute.provider_tree [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1007.807484] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891766, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.826024] env[68285]: DEBUG nova.compute.manager [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1007.946634] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "64103f25-6411-44be-a60f-b9c276dba331" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.946634] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "64103f25-6411-44be-a60f-b9c276dba331" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.946634] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "64103f25-6411-44be-a60f-b9c276dba331-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.946634] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "64103f25-6411-44be-a60f-b9c276dba331-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.946634] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "64103f25-6411-44be-a60f-b9c276dba331-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.949597] env[68285]: INFO nova.compute.manager [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Terminating instance [ 1008.311673] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891766, 'name': ReconfigVM_Task, 'duration_secs': 1.169097} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.311955] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Reconfigured VM instance instance-0000003d to attach disk [datastore2] 3c71f649-b456-45a0-a113-725a529702a2/3c71f649-b456-45a0-a113-725a529702a2.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1008.312570] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54a46d79-7e3c-4086-b2f4-1df8a58b8988 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.318729] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1008.318729] env[68285]: value = "task-2891767" [ 1008.318729] env[68285]: _type = "Task" [ 1008.318729] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.327026] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891767, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.339246] env[68285]: DEBUG nova.scheduler.client.report [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 88 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1008.339508] env[68285]: DEBUG nova.compute.provider_tree [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 88 to 89 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1008.339687] env[68285]: DEBUG nova.compute.provider_tree [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1008.454147] env[68285]: DEBUG nova.compute.manager [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1008.454395] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1008.455304] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f825e13-c24e-4197-b8b4-6eba49bc0732 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.469561] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.469849] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9299558-6eb5-4432-aabb-69e053c20cb4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.478250] env[68285]: DEBUG oslo_vmware.api [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1008.478250] env[68285]: value = "task-2891768" [ 1008.478250] env[68285]: _type = "Task" [ 1008.478250] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.486942] env[68285]: DEBUG oslo_vmware.api [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891768, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.829891] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891767, 'name': Rename_Task, 'duration_secs': 0.156487} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.830196] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1008.830282] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df3523c1-0ecf-4b59-96b4-27c532df260a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.837233] env[68285]: DEBUG nova.compute.manager [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1008.842520] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1008.842520] env[68285]: value = "task-2891769" [ 1008.842520] env[68285]: _type = "Task" [ 1008.842520] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.850804] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.852959] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891769, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.854411] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.498s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.856666] env[68285]: INFO nova.compute.claims [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1008.879963] env[68285]: INFO nova.scheduler.client.report [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Deleted allocations for instance 8b473550-4a40-48a5-9e1c-7c48df828e61 [ 1008.884488] env[68285]: DEBUG nova.virt.hardware [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1008.884488] env[68285]: DEBUG nova.virt.hardware [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1008.884488] env[68285]: DEBUG nova.virt.hardware [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1008.884488] env[68285]: DEBUG nova.virt.hardware [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1008.884488] env[68285]: DEBUG nova.virt.hardware [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1008.884488] env[68285]: DEBUG nova.virt.hardware [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 
tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1008.884488] env[68285]: DEBUG nova.virt.hardware [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1008.884488] env[68285]: DEBUG nova.virt.hardware [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1008.884488] env[68285]: DEBUG nova.virt.hardware [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1008.884488] env[68285]: DEBUG nova.virt.hardware [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1008.884488] env[68285]: DEBUG nova.virt.hardware [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1008.885710] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a39d063-29d1-4256-ba36-01617fbeba3e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.900059] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ca2c0e-2824-45f6-a13d-587e8762f415 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.915146] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1008.921386] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Creating folder: Project (6a0b79d1c70449b58c9503e388524025). Parent ref: group-v580775. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1008.922191] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c97d3de-d2e1-44a9-9e1f-f3ff83a6f410 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.934097] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Created folder: Project (6a0b79d1c70449b58c9503e388524025) in parent group-v580775. [ 1008.934232] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Creating folder: Instances. Parent ref: group-v580952. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1008.934646] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d710c0dc-a8d6-44ea-b8e9-31a19ba948b2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.943879] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Created folder: Instances in parent group-v580952. [ 1008.944139] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1008.944322] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1008.944522] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93ea8229-5798-4534-bf24-d97336aa4e5f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.962354] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1008.962354] env[68285]: value = "task-2891772" [ 1008.962354] env[68285]: _type = "Task" [ 1008.962354] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.970281] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891772, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.988106] env[68285]: DEBUG oslo_vmware.api [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891768, 'name': PowerOffVM_Task, 'duration_secs': 0.215861} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.988106] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.988300] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1008.988680] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39d449f8-2f9e-4cfe-9313-be3f55b2c7c0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.060792] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1009.061027] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1009.061220] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Deleting the datastore file [datastore2] 64103f25-6411-44be-a60f-b9c276dba331 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1009.061519] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c438c96-3470-428c-86f4-0b61eb4d8ce1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.072635] env[68285]: DEBUG oslo_vmware.api [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1009.072635] env[68285]: value = "task-2891774" [ 1009.072635] env[68285]: _type = "Task" [ 1009.072635] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.081650] env[68285]: DEBUG oslo_vmware.api [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891774, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.352724] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891769, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.399445] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b9bfcae9-27d6-4f1c-8305-49ab789a5614 tempest-ServersTestMultiNic-1707196865 tempest-ServersTestMultiNic-1707196865-project-member] Lock "8b473550-4a40-48a5-9e1c-7c48df828e61" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.591s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.473294] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891772, 'name': CreateVM_Task, 'duration_secs': 0.272039} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.473508] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1009.473954] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.474131] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.474438] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1009.475337] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69cae7da-0c8a-42b6-83ef-a6f0efd94691 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.480562] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1009.480562] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f61e02-14f3-bcbf-d048-8aa53d591b34" [ 1009.480562] env[68285]: _type = "Task" [ 1009.480562] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.488275] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f61e02-14f3-bcbf-d048-8aa53d591b34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.584268] env[68285]: DEBUG oslo_vmware.api [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891774, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142482} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.584623] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1009.584842] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1009.585073] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1009.585292] env[68285]: INFO nova.compute.manager [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1009.585586] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1009.585832] env[68285]: DEBUG nova.compute.manager [-] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1009.585986] env[68285]: DEBUG nova.network.neutron [-] [instance: 64103f25-6411-44be-a60f-b9c276dba331] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1009.855150] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891769, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.000699] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f61e02-14f3-bcbf-d048-8aa53d591b34, 'name': SearchDatastore_Task, 'duration_secs': 0.009965} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.001301] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.001584] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1010.001781] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.001927] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.002125] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1010.002403] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-cbd782b0-45be-4240-b40d-efde6f3d58e4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.011957] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1010.014024] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1010.014024] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4024c5e1-26ba-403e-b177-cfdfba58443b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.018865] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1010.018865] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52db6432-8fed-556a-24d6-ca3f18b3e51d" [ 1010.018865] env[68285]: _type = "Task" [ 1010.018865] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.031715] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52db6432-8fed-556a-24d6-ca3f18b3e51d, 'name': SearchDatastore_Task, 'duration_secs': 0.009969} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.032766] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8be318b2-9546-43c8-98b0-47b478c9f53f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.037865] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1010.037865] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521a8103-822b-35a2-b0cd-6ada8a525c90" [ 1010.037865] env[68285]: _type = "Task" [ 1010.037865] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.050194] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521a8103-822b-35a2-b0cd-6ada8a525c90, 'name': SearchDatastore_Task, 'duration_secs': 0.008268} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.050657] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.050957] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 9e81990d-e63e-48a7-8941-f0298ca184b3/9e81990d-e63e-48a7-8941-f0298ca184b3.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1010.051858] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a0a22c2c-b525-48d5-aa8f-3de9cc9d77ef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.059581] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1010.059581] env[68285]: value = "task-2891775" [ 1010.059581] env[68285]: _type = "Task" [ 1010.059581] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.067058] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891775, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.138668] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquiring lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1010.138890] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.169305] env[68285]: DEBUG nova.compute.manager [req-0c18d1ae-2847-43ee-9d54-da66ec56eaf6 req-037f3434-f3f5-499d-b563-034e748caeb5 service nova] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Received event network-vif-deleted-efc7ff98-8b15-4f2b-9c65-16a914ff393a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1010.169526] env[68285]: INFO nova.compute.manager [req-0c18d1ae-2847-43ee-9d54-da66ec56eaf6 req-037f3434-f3f5-499d-b563-034e748caeb5 service nova] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Neutron deleted interface efc7ff98-8b15-4f2b-9c65-16a914ff393a; detaching it from the instance and deleting it from the info cache [ 1010.169695] env[68285]: DEBUG nova.network.neutron [req-0c18d1ae-2847-43ee-9d54-da66ec56eaf6 req-037f3434-f3f5-499d-b563-034e748caeb5 service nova] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.356261] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891769, 'name': PowerOnVM_Task} progress is 81%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.364371] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f41ba4-bb24-41f1-827e-dc096317d4a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.372031] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41dde8bf-63d2-4e5f-9ade-cf04018a14f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.401854] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f638ee05-5c4a-4c92-9ba4-d9b8a35cfb66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.409570] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ff0216-a132-4fad-8927-12b81d8dcb10 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.423369] env[68285]: DEBUG nova.compute.provider_tree [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1010.494411] env[68285]: DEBUG nova.network.neutron [-] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.570397] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891775, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.644144] env[68285]: DEBUG nova.compute.manager [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1010.674672] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-959997ef-b53a-4053-92c9-dd06a81879e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.691618] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85361143-f562-44f3-9bce-fb258e2d5b4e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.733412] env[68285]: DEBUG nova.compute.manager [req-0c18d1ae-2847-43ee-9d54-da66ec56eaf6 req-037f3434-f3f5-499d-b563-034e748caeb5 service nova] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Detach interface failed, port_id=efc7ff98-8b15-4f2b-9c65-16a914ff393a, reason: Instance 64103f25-6411-44be-a60f-b9c276dba331 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1010.858253] env[68285]: DEBUG oslo_vmware.api [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891769, 'name': PowerOnVM_Task, 'duration_secs': 1.827797} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.858253] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1010.858253] env[68285]: INFO nova.compute.manager [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Took 10.20 seconds to spawn the instance on the hypervisor. [ 1010.858253] env[68285]: DEBUG nova.compute.manager [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1010.858253] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26e4b92-2f44-49ba-94b5-f5c03d006508 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.949625] env[68285]: ERROR nova.scheduler.client.report [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [req-3d21f14b-3b1c-45b3-a86d-4426fbdc4106] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3d21f14b-3b1c-45b3-a86d-4426fbdc4106"}]} [ 1010.969186] env[68285]: DEBUG nova.scheduler.client.report [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1010.988305] env[68285]: DEBUG nova.scheduler.client.report [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1010.988538] env[68285]: DEBUG nova.compute.provider_tree [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1010.996994] env[68285]: INFO nova.compute.manager [-] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Took 1.41 seconds to deallocate network for instance. 
[ 1011.006621] env[68285]: DEBUG nova.scheduler.client.report [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1011.030716] env[68285]: DEBUG nova.scheduler.client.report [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1011.075669] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891775, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543771} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.075669] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 9e81990d-e63e-48a7-8941-f0298ca184b3/9e81990d-e63e-48a7-8941-f0298ca184b3.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1011.075669] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1011.075669] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0589f28-0e89-455c-8e77-c87332c038c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.086964] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1011.086964] env[68285]: value = "task-2891776" [ 1011.086964] env[68285]: _type = "Task" [ 1011.086964] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.097580] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891776, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.171376] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.380213] env[68285]: INFO nova.compute.manager [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Took 56.84 seconds to build instance. [ 1011.504208] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.551908] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab0c63a-822c-42af-ab98-d48fb77af7fa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.560012] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7de500a-815e-4d17-b4d0-8d08e3c469ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.598441] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac2c6e5-cfd8-4fbc-91d3-3df46bc9b875 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.605456] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891776, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071579} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.608170] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1011.610499] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c77660-9917-42b7-9519-9c532e12dbf5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.612078] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a824b1-3c69-4796-81c2-0dd50e12bb28 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.626104] env[68285]: DEBUG nova.compute.provider_tree [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.643110] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] 9e81990d-e63e-48a7-8941-f0298ca184b3/9e81990d-e63e-48a7-8941-f0298ca184b3.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1011.644310] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5eaa9f03-0807-452b-8a4f-50ea561d2b6a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.664513] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1011.664513] env[68285]: value = "task-2891777" [ 1011.664513] env[68285]: _type = "Task" [ 1011.664513] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.672876] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891777, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.775223] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1873fd21-62eb-4194-a79c-b05b1e5f5b81 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.782133] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dfe92a1b-88af-4561-bb1f-9d7604d3ed74 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Suspending the VM {{(pid=68285) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1011.784380] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-b7bd6069-7fa7-434d-9a9b-2c18b4d1870c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.789231] env[68285]: DEBUG oslo_vmware.api [None req-dfe92a1b-88af-4561-bb1f-9d7604d3ed74 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1011.789231] env[68285]: value = "task-2891778" [ 1011.789231] env[68285]: _type = "Task" [ 1011.789231] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.797762] env[68285]: DEBUG oslo_vmware.api [None req-dfe92a1b-88af-4561-bb1f-9d7604d3ed74 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891778, 'name': SuspendVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.882682] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fd22d0d0-ea4e-4f58-ab8b-6d55caa1117f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "3c71f649-b456-45a0-a113-725a529702a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.564s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.145029] env[68285]: DEBUG nova.scheduler.client.report [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1012.176628] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891777, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.302932] env[68285]: DEBUG oslo_vmware.api [None req-dfe92a1b-88af-4561-bb1f-9d7604d3ed74 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891778, 'name': SuspendVM_Task} progress is 62%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.650373] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.797s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.650918] env[68285]: DEBUG nova.compute.manager [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1012.653532] env[68285]: DEBUG oslo_concurrency.lockutils [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.413s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.654878] env[68285]: INFO nova.compute.claims [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1012.674275] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891777, 'name': ReconfigVM_Task, 'duration_secs': 0.845484} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.674539] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Reconfigured VM instance instance-0000003e to attach disk [datastore2] 9e81990d-e63e-48a7-8941-f0298ca184b3/9e81990d-e63e-48a7-8941-f0298ca184b3.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1012.675133] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-76218d63-0040-4d03-af58-5e21c475923b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.681863] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1012.681863] env[68285]: value = "task-2891779" [ 1012.681863] env[68285]: _type = "Task" [ 1012.681863] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.690119] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891779, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.800099] env[68285]: DEBUG oslo_vmware.api [None req-dfe92a1b-88af-4561-bb1f-9d7604d3ed74 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891778, 'name': SuspendVM_Task, 'duration_secs': 0.750686} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.800423] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dfe92a1b-88af-4561-bb1f-9d7604d3ed74 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Suspended the VM {{(pid=68285) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1012.800651] env[68285]: DEBUG nova.compute.manager [None req-dfe92a1b-88af-4561-bb1f-9d7604d3ed74 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1012.801506] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158589a7-2ff4-40e9-818b-6eb070a4a772 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.159728] env[68285]: DEBUG nova.compute.utils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1013.166684] env[68285]: DEBUG nova.compute.manager [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1013.166909] env[68285]: DEBUG nova.network.neutron [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1013.194889] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891779, 'name': Rename_Task, 'duration_secs': 0.125789} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.196062] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1013.196437] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-961ee317-1b46-421f-b290-a95a479aed95 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.203719] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1013.203719] env[68285]: value = "task-2891780" [ 1013.203719] env[68285]: _type = "Task" [ 1013.203719] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.214235] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891780, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.233289] env[68285]: DEBUG nova.policy [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e92d773e0a524f379ea33b8073473b10', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6db9f990e5244cab8bf41ddba01bf85', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1013.623417] env[68285]: DEBUG nova.network.neutron [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Successfully created port: bb6fa717-2a9e-4638-a045-be1c5301c96b {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1013.667639] env[68285]: DEBUG nova.compute.manager [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1013.719357] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891780, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.167953] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73e2d97-dc4e-4475-b837-cc330d1ee5fe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.173988] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a65416c-52a0-452d-8a81-300741182cd3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.214993] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5c177e-42f0-44c1-8dd5-c7da1c7038af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.224124] env[68285]: DEBUG oslo_vmware.api [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891780, 'name': PowerOnVM_Task, 'duration_secs': 0.687603} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.227087] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1014.227399] env[68285]: INFO nova.compute.manager [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Took 5.39 seconds to spawn the instance on the hypervisor. [ 1014.227627] env[68285]: DEBUG nova.compute.manager [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1014.229224] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78abd9ba-18c2-4458-8aa3-7d6d2af53262 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.233542] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d6e20a-5038-49e8-83c7-82f58720f8d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.256676] env[68285]: DEBUG nova.compute.provider_tree [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.682923] env[68285]: DEBUG nova.compute.manager [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1014.704136] env[68285]: DEBUG nova.virt.hardware [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1014.705275] env[68285]: DEBUG nova.virt.hardware [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1014.705275] env[68285]: DEBUG nova.virt.hardware [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1014.705275] env[68285]: DEBUG nova.virt.hardware [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1014.705275] env[68285]: DEBUG nova.virt.hardware [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1014.705275] env[68285]: DEBUG nova.virt.hardware [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1014.705275] env[68285]: DEBUG nova.virt.hardware [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1014.705505] env[68285]: DEBUG nova.virt.hardware [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1014.705505] env[68285]: DEBUG nova.virt.hardware [None 
req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1014.705652] env[68285]: DEBUG nova.virt.hardware [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1014.705818] env[68285]: DEBUG nova.virt.hardware [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1014.706691] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84af3f29-c316-4d25-a85b-3ac08c9ce960 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.714924] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c0feb2-af0a-462a-befc-ac5fa4ca9357 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.771108] env[68285]: DEBUG nova.scheduler.client.report [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1014.775892] env[68285]: INFO nova.compute.manager [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Took 58.24 seconds to build instance. [ 1015.281967] env[68285]: DEBUG oslo_concurrency.lockutils [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.281967] env[68285]: DEBUG nova.compute.manager [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1015.282655] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.227s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.282965] env[68285]: DEBUG nova.objects.instance [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Lazy-loading 'resources' on Instance uuid 14285f6e-10a4-4077-a666-3c8d0cc1b87c {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.284826] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ed5d3212-f5ce-447e-b44c-af815f11e6a0 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Lock "9e81990d-e63e-48a7-8941-f0298ca184b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.997s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.548442] env[68285]: DEBUG nova.compute.manager [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1015.549433] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14288a17-06c1-425f-96f6-d52e0f3bdd27 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.574631] env[68285]: DEBUG nova.compute.manager [req-93cff444-ea3f-4329-a69a-ab57392261f3 req-e7eb29c6-ad9a-4daa-91a9-120935bdfddd service nova] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Received event network-vif-plugged-bb6fa717-2a9e-4638-a045-be1c5301c96b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1015.574924] env[68285]: DEBUG oslo_concurrency.lockutils [req-93cff444-ea3f-4329-a69a-ab57392261f3 req-e7eb29c6-ad9a-4daa-91a9-120935bdfddd service nova] Acquiring lock "1dce61a2-0fe2-4384-835c-7e324446d7cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.575098] env[68285]: DEBUG oslo_concurrency.lockutils [req-93cff444-ea3f-4329-a69a-ab57392261f3 req-e7eb29c6-ad9a-4daa-91a9-120935bdfddd service nova] Lock "1dce61a2-0fe2-4384-835c-7e324446d7cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.575226] env[68285]: DEBUG oslo_concurrency.lockutils [req-93cff444-ea3f-4329-a69a-ab57392261f3 req-e7eb29c6-ad9a-4daa-91a9-120935bdfddd service nova] Lock "1dce61a2-0fe2-4384-835c-7e324446d7cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.575442] env[68285]: DEBUG nova.compute.manager [req-93cff444-ea3f-4329-a69a-ab57392261f3 req-e7eb29c6-ad9a-4daa-91a9-120935bdfddd service nova] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] No waiting events found dispatching network-vif-plugged-bb6fa717-2a9e-4638-a045-be1c5301c96b {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1015.575630] env[68285]: WARNING nova.compute.manager [req-93cff444-ea3f-4329-a69a-ab57392261f3 req-e7eb29c6-ad9a-4daa-91a9-120935bdfddd service nova] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Received unexpected event network-vif-plugged-bb6fa717-2a9e-4638-a045-be1c5301c96b for instance with vm_state building and task_state spawning. [ 1015.623056] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "be47df2a-aee7-4275-9acb-9cf74367f503" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.623313] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "be47df2a-aee7-4275-9acb-9cf74367f503" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.657677] env[68285]: DEBUG nova.network.neutron [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Successfully updated port: bb6fa717-2a9e-4638-a045-be1c5301c96b {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1015.790616] env[68285]: DEBUG nova.compute.utils [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1015.790616] env[68285]: DEBUG nova.compute.manager [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Not allocating networking since 'none' was specified. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1016.061955] env[68285]: INFO nova.compute.manager [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] instance snapshotting [ 1016.063021] env[68285]: WARNING nova.compute.manager [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1016.067112] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbbc2037-1cc1-44c7-83fe-b45c18845799 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.091273] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeea2c26-e84e-426d-ab2d-e7e3c8be1c4e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.125769] env[68285]: DEBUG nova.compute.manager [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1016.159515] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Acquiring lock "refresh_cache-1dce61a2-0fe2-4384-835c-7e324446d7cc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.160335] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Acquired lock "refresh_cache-1dce61a2-0fe2-4384-835c-7e324446d7cc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.160335] env[68285]: DEBUG nova.network.neutron [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1016.292979] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151f2e06-ff64-435f-9dfb-29299acab46f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.295737] env[68285]: DEBUG nova.compute.manager [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1016.307020] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10629117-83d9-4e38-94ef-a97dfaa6381c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.341668] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293bd0aa-a90a-4dff-a8ac-e7bfd6c91ad8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.349892] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db05c105-e875-4f67-838d-d04d5e7cf3b3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.364057] env[68285]: DEBUG nova.compute.provider_tree [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.373465] env[68285]: INFO nova.compute.manager [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Rebuilding instance [ 1016.420184] env[68285]: DEBUG nova.compute.manager [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1016.421070] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b62e2f7-6ab3-4b06-baeb-ca857043dc61 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.602484] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1016.602803] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b275310c-2e7c-4382-8447-a5393ec4e5a2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.610410] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1016.610410] env[68285]: value = "task-2891781" [ 1016.610410] env[68285]: _type = "Task" [ 1016.610410] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.618778] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891781, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.644631] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.711393] env[68285]: DEBUG nova.network.neutron [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1016.869022] env[68285]: DEBUG nova.scheduler.client.report [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1016.909534] env[68285]: DEBUG nova.network.neutron [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Updating instance_info_cache with network_info: [{"id": "bb6fa717-2a9e-4638-a045-be1c5301c96b", "address": "fa:16:3e:75:1c:2e", "network": {"id": "48e6d83c-16a8-40a0-8ba2-8281c0a5fa97", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1959021458-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6db9f990e5244cab8bf41ddba01bf85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb6fa717-2a", "ovs_interfaceid": "bb6fa717-2a9e-4638-a045-be1c5301c96b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.120301] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891781, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.306837] env[68285]: DEBUG nova.compute.manager [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1017.328361] env[68285]: DEBUG nova.virt.hardware [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1017.328613] env[68285]: DEBUG nova.virt.hardware [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1017.328771] env[68285]: DEBUG nova.virt.hardware [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1017.328952] env[68285]: DEBUG nova.virt.hardware [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1017.329117] env[68285]: DEBUG nova.virt.hardware [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1017.329266] env[68285]: DEBUG nova.virt.hardware [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1017.329472] env[68285]: DEBUG nova.virt.hardware [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1017.329632] env[68285]: DEBUG nova.virt.hardware 
[None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1017.329838] env[68285]: DEBUG nova.virt.hardware [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1017.330014] env[68285]: DEBUG nova.virt.hardware [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1017.330195] env[68285]: DEBUG nova.virt.hardware [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1017.331196] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f778c6-e24e-47a5-85dd-0182433a7464 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.340227] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829eb4f8-79b6-490f-be75-549bbeb2da3d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.354046] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1017.359933] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Creating folder: Project (ea372d3a29a64b6caf9154d656a6eee7). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1017.360212] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2664e2f7-882c-4ddd-a2d9-5b4a6a743144 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.370628] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Created folder: Project (ea372d3a29a64b6caf9154d656a6eee7) in parent group-v580775. [ 1017.370810] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Creating folder: Instances. Parent ref: group-v580955. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1017.371038] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-224e2edd-6cf7-4c32-9d00-04ff06aead8a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.374324] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.092s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.377212] env[68285]: DEBUG oslo_concurrency.lockutils [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 47.175s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.379870] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Created folder: Instances in parent group-v580955. [ 1017.380112] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1017.380502] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1017.380700] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22bb3966-a330-4027-9997-01c9dc0f13e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.397400] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1017.397400] env[68285]: value = "task-2891784" [ 1017.397400] env[68285]: _type = "Task" [ 1017.397400] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.401817] env[68285]: INFO nova.scheduler.client.report [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Deleted allocations for instance 14285f6e-10a4-4077-a666-3c8d0cc1b87c [ 1017.409035] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891784, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.411897] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Releasing lock "refresh_cache-1dce61a2-0fe2-4384-835c-7e324446d7cc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.412200] env[68285]: DEBUG nova.compute.manager [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Instance network_info: |[{"id": "bb6fa717-2a9e-4638-a045-be1c5301c96b", "address": "fa:16:3e:75:1c:2e", "network": {"id": "48e6d83c-16a8-40a0-8ba2-8281c0a5fa97", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1959021458-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6db9f990e5244cab8bf41ddba01bf85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb6fa717-2a", "ovs_interfaceid": "bb6fa717-2a9e-4638-a045-be1c5301c96b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1017.412612] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:1c:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb6fa717-2a9e-4638-a045-be1c5301c96b', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1017.419902] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Creating folder: Project (c6db9f990e5244cab8bf41ddba01bf85). Parent ref: group-v580775. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1017.420414] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c999614-0413-4161-b570-116ff72ec2eb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.431196] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Created folder: Project (c6db9f990e5244cab8bf41ddba01bf85) in parent group-v580775. [ 1017.431386] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Creating folder: Instances. Parent ref: group-v580958. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1017.431610] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b52e2c2-6ecf-4890-be19-670f481ea39d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.435869] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1017.436163] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4368f116-1053-4ada-a529-c309a78addb4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.441615] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Created folder: Instances in parent group-v580958. [ 1017.441861] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1017.442155] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1017.443302] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9092418d-5959-4512-839c-f57019d5e915 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.460584] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1017.460584] env[68285]: value = "task-2891787" [ 1017.460584] env[68285]: _type = "Task" [ 1017.460584] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.466599] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1017.466599] env[68285]: value = "task-2891788" [ 1017.466599] env[68285]: _type = "Task" [ 1017.466599] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.475021] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891787, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.480205] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891788, 'name': CreateVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.602647] env[68285]: DEBUG nova.compute.manager [req-5d63b157-3101-4ba4-9748-b73a0b4e6cb0 req-9758bd4f-58f8-4ec4-9fae-40045857f1c4 service nova] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Received event network-changed-bb6fa717-2a9e-4638-a045-be1c5301c96b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1017.603013] env[68285]: DEBUG nova.compute.manager [req-5d63b157-3101-4ba4-9748-b73a0b4e6cb0 req-9758bd4f-58f8-4ec4-9fae-40045857f1c4 service nova] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Refreshing instance network info cache due to event network-changed-bb6fa717-2a9e-4638-a045-be1c5301c96b. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1017.603585] env[68285]: DEBUG oslo_concurrency.lockutils [req-5d63b157-3101-4ba4-9748-b73a0b4e6cb0 req-9758bd4f-58f8-4ec4-9fae-40045857f1c4 service nova] Acquiring lock "refresh_cache-1dce61a2-0fe2-4384-835c-7e324446d7cc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.603841] env[68285]: DEBUG oslo_concurrency.lockutils [req-5d63b157-3101-4ba4-9748-b73a0b4e6cb0 req-9758bd4f-58f8-4ec4-9fae-40045857f1c4 service nova] Acquired lock "refresh_cache-1dce61a2-0fe2-4384-835c-7e324446d7cc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.604119] env[68285]: DEBUG nova.network.neutron [req-5d63b157-3101-4ba4-9748-b73a0b4e6cb0 req-9758bd4f-58f8-4ec4-9fae-40045857f1c4 service nova] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Refreshing network info cache for port bb6fa717-2a9e-4638-a045-be1c5301c96b {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1017.621492] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891781, 'name': CreateSnapshot_Task, 'duration_secs': 0.973279} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.621758] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1017.622573] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a755d746-6798-420f-bf66-dd13a2967122 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.881530] env[68285]: DEBUG nova.objects.instance [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lazy-loading 'migration_context' on Instance uuid b3b7f551-81aa-4ac4-9906-020fac5f01f7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.907538] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891784, 'name': CreateVM_Task, 'duration_secs': 0.301047} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.908597] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1017.908597] env[68285]: DEBUG oslo_concurrency.lockutils [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.908597] env[68285]: DEBUG oslo_concurrency.lockutils [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.908730] env[68285]: DEBUG oslo_concurrency.lockutils [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1017.909372] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecb77bb5-9b32-4e77-bd90-eb14d28940dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.913863] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e24cd27d-c61e-456e-a7e3-8b87e8c8b2d3 tempest-ServerAddressesNegativeTestJSON-928636979 tempest-ServerAddressesNegativeTestJSON-928636979-project-member] Lock "14285f6e-10a4-4077-a666-3c8d0cc1b87c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.365s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.916537] env[68285]: DEBUG 
oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1017.916537] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521bf370-42d8-737a-0870-75645ba627fb" [ 1017.916537] env[68285]: _type = "Task" [ 1017.916537] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.924602] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521bf370-42d8-737a-0870-75645ba627fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.974052] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891787, 'name': PowerOffVM_Task, 'duration_secs': 0.157704} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.974716] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1017.975397] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1017.976189] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f70386-97f9-4269-b7e2-61321ce892cb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.981579] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891788, 'name': CreateVM_Task, 'duration_secs': 0.353347} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.982036] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1017.982659] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.985350] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1017.985578] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-704d9bac-21b4-407d-b0c3-6b9e3f15d2fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.014422] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1018.014654] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1018.014863] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Deleting the datastore file [datastore2] 9e81990d-e63e-48a7-8941-f0298ca184b3 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1018.015165] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38922c0a-28d8-4f14-a73a-4fcaf6d4f6fe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.022161] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1018.022161] env[68285]: value = "task-2891790" [ 1018.022161] env[68285]: _type = "Task" [ 1018.022161] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.030108] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891790, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.140603] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1018.140924] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-dfbc9aa1-51c5-41cc-8be5-5e8dc38f5f8b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.148838] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1018.148838] env[68285]: value = "task-2891791" [ 1018.148838] env[68285]: _type = "Task" [ 1018.148838] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.158339] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891791, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.435100] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521bf370-42d8-737a-0870-75645ba627fb, 'name': SearchDatastore_Task, 'duration_secs': 0.009555} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.435730] env[68285]: DEBUG oslo_concurrency.lockutils [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.435730] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1018.435882] env[68285]: DEBUG oslo_concurrency.lockutils [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.436015] env[68285]: DEBUG oslo_concurrency.lockutils [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.436201] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1018.436481] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.436778] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1018.437011] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-625ebb79-0a37-411e-992a-14c43e713c14 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.444235] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ef25b43-3fde-4117-928a-b85ee6cb49e2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.451474] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 
tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Waiting for the task: (returnval){ [ 1018.451474] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52439986-cd2f-15ad-e16d-421ab43caa82" [ 1018.451474] env[68285]: _type = "Task" [ 1018.451474] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.455608] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1018.455791] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1018.456870] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8783f804-b7a6-4a82-8f15-4b0f58c824ed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.463034] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52439986-cd2f-15ad-e16d-421ab43caa82, 'name': SearchDatastore_Task, 'duration_secs': 0.007876} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.466087] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.466338] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1018.466566] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.472075] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1018.472075] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5215d9f1-9b9d-9faa-c264-2b3dac1e793d" [ 1018.472075] env[68285]: _type = "Task" [ 1018.472075] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.481926] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5215d9f1-9b9d-9faa-c264-2b3dac1e793d, 'name': SearchDatastore_Task, 'duration_secs': 0.008086} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.482584] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21731953-5d36-49a1-9cbb-d7c47281a86f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.487506] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1018.487506] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5264c4a7-9b9c-2c85-8ca8-f5c1def398be" [ 1018.487506] env[68285]: _type = "Task" [ 1018.487506] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.494881] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5264c4a7-9b9c-2c85-8ca8-f5c1def398be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.519908] env[68285]: DEBUG nova.network.neutron [req-5d63b157-3101-4ba4-9748-b73a0b4e6cb0 req-9758bd4f-58f8-4ec4-9fae-40045857f1c4 service nova] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Updated VIF entry in instance network info cache for port bb6fa717-2a9e-4638-a045-be1c5301c96b. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1018.520279] env[68285]: DEBUG nova.network.neutron [req-5d63b157-3101-4ba4-9748-b73a0b4e6cb0 req-9758bd4f-58f8-4ec4-9fae-40045857f1c4 service nova] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Updating instance_info_cache with network_info: [{"id": "bb6fa717-2a9e-4638-a045-be1c5301c96b", "address": "fa:16:3e:75:1c:2e", "network": {"id": "48e6d83c-16a8-40a0-8ba2-8281c0a5fa97", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1959021458-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6db9f990e5244cab8bf41ddba01bf85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb6fa717-2a", "ovs_interfaceid": "bb6fa717-2a9e-4638-a045-be1c5301c96b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.531929] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891790, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.113552} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.532265] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1018.532456] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1018.532842] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1018.660313] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891791, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.874782] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7ba53b-6bb5-40cc-8f43-f2babf15c6b1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.882720] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b08bd96b-5d57-4690-bcd6-2a9a43137c7c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.918147] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a6e2ae-a0c8-43a7-9c20-e3f9e6b86e4d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.925673] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940c2de1-487e-4369-bfb4-238ec569722f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.940894] env[68285]: DEBUG nova.compute.provider_tree [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.997414] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5264c4a7-9b9c-2c85-8ca8-f5c1def398be, 'name': SearchDatastore_Task, 'duration_secs': 0.008087} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.997677] env[68285]: DEBUG oslo_concurrency.lockutils [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.997925] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 2a9b3b56-8607-4da8-9186-8a933cfe0351/2a9b3b56-8607-4da8-9186-8a933cfe0351.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1018.998231] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.998431] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1018.998657] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6afcf59a-f91d-411d-8c18-cc5313315c80 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.000586] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ade70841-b22e-4f00-bf35-62c85eb6c4ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.006590] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1019.006590] env[68285]: value = "task-2891792" [ 1019.006590] env[68285]: _type = "Task" [ 1019.006590] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.011374] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1019.011478] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1019.012551] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64b9cd8e-4e7d-4308-bc65-ee4deac99e31 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.018239] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891792, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.021627] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Waiting for the task: (returnval){ [ 1019.021627] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52652945-e697-cb4d-5dc8-9943cf477792" [ 1019.021627] env[68285]: _type = "Task" [ 1019.021627] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.024827] env[68285]: DEBUG oslo_concurrency.lockutils [req-5d63b157-3101-4ba4-9748-b73a0b4e6cb0 req-9758bd4f-58f8-4ec4-9fae-40045857f1c4 service nova] Releasing lock "refresh_cache-1dce61a2-0fe2-4384-835c-7e324446d7cc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1019.029947] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52652945-e697-cb4d-5dc8-9943cf477792, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.164332] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891791, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.444615] env[68285]: DEBUG nova.scheduler.client.report [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1019.517150] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891792, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.532800] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52652945-e697-cb4d-5dc8-9943cf477792, 'name': SearchDatastore_Task, 'duration_secs': 0.008779} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.536058] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1b67f5e-95e5-4f51-8fb6-1889b75091b7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.549479] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Waiting for the task: (returnval){ [ 1019.549479] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52be0edc-583a-6ac9-2ca3-4104284f7898" [ 1019.549479] env[68285]: _type = "Task" [ 1019.549479] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.564544] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52be0edc-583a-6ac9-2ca3-4104284f7898, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.575262] env[68285]: DEBUG nova.virt.hardware [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1019.575567] env[68285]: DEBUG nova.virt.hardware [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1019.575728] env[68285]: DEBUG nova.virt.hardware [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1019.575912] env[68285]: DEBUG 
nova.virt.hardware [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1019.576224] env[68285]: DEBUG nova.virt.hardware [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1019.576403] env[68285]: DEBUG nova.virt.hardware [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1019.576645] env[68285]: DEBUG nova.virt.hardware [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1019.576812] env[68285]: DEBUG nova.virt.hardware [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1019.576981] env[68285]: DEBUG nova.virt.hardware [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1019.577169] env[68285]: DEBUG nova.virt.hardware [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1019.577380] env[68285]: DEBUG nova.virt.hardware [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1019.578428] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b39a766-b822-4a18-b927-a44aa4f45bc1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.586025] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a256370-9f8e-4284-b177-7c0d603b3004 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.600299] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Instance VIF info 
[] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1019.605949] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1019.606238] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1019.606471] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c7ef07b-9211-4733-a668-688b2b0b68a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.623459] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1019.623459] env[68285]: value = "task-2891793" [ 1019.623459] env[68285]: _type = "Task" [ 1019.623459] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.634249] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891793, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.659890] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891791, 'name': CloneVM_Task} progress is 95%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.021828] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891792, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571274} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.022157] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 2a9b3b56-8607-4da8-9186-8a933cfe0351/2a9b3b56-8607-4da8-9186-8a933cfe0351.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1020.022412] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1020.022568] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-763736ff-3b83-4a65-aa76-9aa64ae0e100 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.030456] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1020.030456] env[68285]: value = "task-2891794" [ 1020.030456] env[68285]: _type = "Task" [ 1020.030456] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.040456] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891794, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.059680] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52be0edc-583a-6ac9-2ca3-4104284f7898, 'name': SearchDatastore_Task, 'duration_secs': 0.02291} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.060012] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.060334] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 1dce61a2-0fe2-4384-835c-7e324446d7cc/1dce61a2-0fe2-4384-835c-7e324446d7cc.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1020.060655] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-427c7b5c-fc35-456b-9e12-1a44d2fd3244 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.068248] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Waiting for the task: (returnval){ [ 1020.068248] env[68285]: value = "task-2891795" [ 1020.068248] env[68285]: _type = "Task" [ 1020.068248] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.076095] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891795, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.132329] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891793, 'name': CreateVM_Task, 'duration_secs': 0.359249} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.132504] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1020.132921] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.133096] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.133414] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1020.133662] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9ce2960-c6aa-40bc-b84a-f61c5be5a61f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.138615] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1020.138615] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52722461-7277-6f36-0ed9-9acadf1ff384" [ 1020.138615] env[68285]: _type = "Task" [ 1020.138615] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.145407] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52722461-7277-6f36-0ed9-9acadf1ff384, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.163635] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891791, 'name': CloneVM_Task, 'duration_secs': 1.655278} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.163905] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Created linked-clone VM from snapshot [ 1020.165697] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e920de7-6310-45cf-be17-44586eb50142 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.172853] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Uploading image 7c47a7cd-77c4-4c1b-9860-d6c69cedb919 {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1020.195011] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1020.195011] env[68285]: value = "vm-580962" [ 1020.195011] env[68285]: _type = "VirtualMachine" [ 1020.195011] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1020.195198] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d71ec9a3-f145-41fe-85cf-7a1c016edb87 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.202442] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lease: (returnval){ [ 1020.202442] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525d07a2-b282-2c28-fe64-d215daa84129" [ 1020.202442] env[68285]: _type = "HttpNfcLease" [ 1020.202442] env[68285]: } obtained for exporting VM: (result){ [ 1020.202442] env[68285]: value = "vm-580962" [ 1020.202442] env[68285]: _type = "VirtualMachine" [ 1020.202442] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1020.202442] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the lease: (returnval){ [ 1020.202442] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525d07a2-b282-2c28-fe64-d215daa84129" [ 1020.202442] env[68285]: _type = "HttpNfcLease" [ 1020.202442] env[68285]: } to be ready. {{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1020.208447] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1020.208447] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525d07a2-b282-2c28-fe64-d215daa84129" [ 1020.208447] env[68285]: _type = "HttpNfcLease" [ 1020.208447] env[68285]: } is initializing. 
{{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1020.458838] env[68285]: DEBUG oslo_concurrency.lockutils [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.082s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.470864] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 48.835s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.470864] env[68285]: DEBUG nova.objects.instance [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1020.541720] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891794, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069109} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.542026] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1020.542884] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d17222-ba9c-4212-8037-01d90b3943c1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.564245] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 2a9b3b56-8607-4da8-9186-8a933cfe0351/2a9b3b56-8607-4da8-9186-8a933cfe0351.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1020.565366] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b8f9724-1d23-4707-a78c-6dc591779672 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.589811] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891795, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512191} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.591192] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 1dce61a2-0fe2-4384-835c-7e324446d7cc/1dce61a2-0fe2-4384-835c-7e324446d7cc.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1020.591414] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1020.591724] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1020.591724] env[68285]: value = "task-2891797" [ 1020.591724] env[68285]: _type = "Task" [ 1020.591724] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.591907] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f89be906-5377-4325-8a1f-9539ea2f4afc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.603261] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891797, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.604433] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Waiting for the task: (returnval){ [ 1020.604433] env[68285]: value = "task-2891798" [ 1020.604433] env[68285]: _type = "Task" [ 1020.604433] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.612515] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891798, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.650728] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52722461-7277-6f36-0ed9-9acadf1ff384, 'name': SearchDatastore_Task, 'duration_secs': 0.009639} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.651195] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.651457] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1020.651749] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.651958] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.652220] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1020.652538] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1baa4d5-e7c5-4707-929f-bfd92046a26e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.660264] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1020.660472] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1020.662721] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-074aac7b-8978-4f9c-9f8f-345cb5f01f11 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.667609] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1020.667609] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527bf4ae-5653-6021-36c2-d1a6a1a34e81" [ 1020.667609] env[68285]: _type = "Task" [ 1020.667609] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.675023] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527bf4ae-5653-6021-36c2-d1a6a1a34e81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.709173] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1020.709173] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525d07a2-b282-2c28-fe64-d215daa84129" [ 1020.709173] env[68285]: _type = "HttpNfcLease" [ 1020.709173] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1020.709465] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1020.709465] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525d07a2-b282-2c28-fe64-d215daa84129" [ 1020.709465] env[68285]: _type = "HttpNfcLease" [ 1020.709465] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1020.710185] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d588545-a081-448f-8b2e-3fd8aa13a47f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.717171] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5274c8f6-c9f8-cb5d-1aab-782466b88f43/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1020.717346] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5274c8f6-c9f8-cb5d-1aab-782466b88f43/disk-0.vmdk for reading. 
{{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1020.828025] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-45b94b62-8c79-4794-8ead-860ccb9f4299 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.103888] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891797, 'name': ReconfigVM_Task, 'duration_secs': 0.295785} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.104446] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 2a9b3b56-8607-4da8-9186-8a933cfe0351/2a9b3b56-8607-4da8-9186-8a933cfe0351.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1021.105142] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-53676571-243b-41ad-9786-706ade261a77 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.115907] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891798, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073142} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.117205] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1021.117554] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1021.117554] env[68285]: value = "task-2891799" [ 1021.117554] env[68285]: _type = "Task" [ 1021.117554] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.121018] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e5dec2-6141-43b3-8c5d-a4807e7297a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.128754] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891799, 'name': Rename_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.150358] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 1dce61a2-0fe2-4384-835c-7e324446d7cc/1dce61a2-0fe2-4384-835c-7e324446d7cc.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1021.150358] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ebb3268-4aa6-4e4e-858d-66312007d218 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.173333] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Waiting for the task: (returnval){ [ 1021.173333] env[68285]: value = "task-2891800" [ 1021.173333] env[68285]: _type = "Task" [ 1021.173333] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.181671] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527bf4ae-5653-6021-36c2-d1a6a1a34e81, 'name': SearchDatastore_Task, 'duration_secs': 0.010947} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.182583] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5840de3c-a7d1-44c4-93c6-ca5ff2543bec {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.188025] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891800, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.191332] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1021.191332] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]528cb057-227d-9925-c31e-46d0d91d3eb8" [ 1021.191332] env[68285]: _type = "Task" [ 1021.191332] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.202824] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528cb057-227d-9925-c31e-46d0d91d3eb8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.491418] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eafcd204-e77c-49da-a4bf-96c262634e4b tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.021s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.493143] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.204s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.493276] env[68285]: DEBUG nova.objects.instance [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lazy-loading 'resources' on Instance uuid b0f32ce2-92fd-4290-a2f4-e5658f775f4f {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.635710] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891799, 'name': Rename_Task, 'duration_secs': 0.185076} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.636088] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1021.636366] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c31bbded-1540-4077-8f72-83223161f6b0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.644473] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1021.644473] env[68285]: value = "task-2891801" [ 1021.644473] env[68285]: _type = "Task" [ 1021.644473] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.653830] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891801, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.690323] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891800, 'name': ReconfigVM_Task, 'duration_secs': 0.369332} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.690323] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 1dce61a2-0fe2-4384-835c-7e324446d7cc/1dce61a2-0fe2-4384-835c-7e324446d7cc.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1021.690323] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f322c35-91dd-45f4-a093-f2dd70b5a443 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.705318] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528cb057-227d-9925-c31e-46d0d91d3eb8, 'name': SearchDatastore_Task, 'duration_secs': 0.011351} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.706849] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.707173] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 9e81990d-e63e-48a7-8941-f0298ca184b3/9e81990d-e63e-48a7-8941-f0298ca184b3.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1021.707509] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Waiting for the task: (returnval){ [ 1021.707509] env[68285]: value = "task-2891802" [ 1021.707509] env[68285]: _type = "Task" [ 1021.707509] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.707693] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3c18616-49a9-4dcf-ae56-f454ff3af20b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.721793] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891802, 'name': Rename_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.722480] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1021.722480] env[68285]: value = "task-2891803" [ 1021.722480] env[68285]: _type = "Task" [ 1021.722480] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.731794] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891803, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.021309] env[68285]: INFO nova.compute.manager [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Swapping old allocation on dict_keys(['7bdf675d-15ae-4a4b-9c03-79d8c773b76b']) held by migration f79dddc6-371b-407f-8616-9c12a70c50cf for instance [ 1022.052215] env[68285]: DEBUG nova.scheduler.client.report [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Overwriting current allocation {'allocations': {'7bdf675d-15ae-4a4b-9c03-79d8c773b76b': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 91}}, 'project_id': 'f891a62d3df3400fa53ac94230bcb8a9', 'user_id': 'cd27450be410458ba1f009b191126755', 'consumer_generation': 1} on consumer b3b7f551-81aa-4ac4-9906-020fac5f01f7 {{(pid=68285) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1022.154613] env[68285]: DEBUG oslo_concurrency.lockutils [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.154903] env[68285]: DEBUG oslo_concurrency.lockutils [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquired lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.155114] env[68285]: DEBUG nova.network.neutron [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1022.165218] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891801, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.228432] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891802, 'name': Rename_Task, 'duration_secs': 0.216291} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.233410] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1022.234046] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9dbaa902-c3b7-416b-a7a7-4d0fc279ccb7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.245484] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891803, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.247400] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Waiting for the task: (returnval){ [ 1022.247400] env[68285]: value = "task-2891804" [ 1022.247400] env[68285]: _type = "Task" [ 1022.247400] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.257031] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891804, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.524773] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410ccafe-4c06-43e6-ac68-276422a53218 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.533387] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd0a7d8-f016-4dd0-8018-7b5694144039 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.370367] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16238701-dcf3-4c9e-9efa-42b05fefdd5c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.385269] env[68285]: DEBUG oslo_vmware.api [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891804, 'name': PowerOnVM_Task, 'duration_secs': 0.80719} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.385584] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891803, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.690158} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.385836] env[68285]: DEBUG oslo_vmware.api [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891801, 'name': PowerOnVM_Task, 'duration_secs': 0.866503} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.388172] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1023.388447] env[68285]: INFO nova.compute.manager [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Took 8.71 seconds to spawn the instance on the hypervisor. [ 1023.388687] env[68285]: DEBUG nova.compute.manager [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1023.389025] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 9e81990d-e63e-48a7-8941-f0298ca184b3/9e81990d-e63e-48a7-8941-f0298ca184b3.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1023.389713] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1023.389713] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1023.389834] env[68285]: INFO nova.compute.manager [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Took 6.08 seconds to spawn the instance on the hypervisor. 
[ 1023.389924] env[68285]: DEBUG nova.compute.manager [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1023.390799] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c507086f-40b1-41ad-ae72-5bd766a7a05f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.393442] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8077f2bb-2dc9-4571-8b6c-cd6e9250d5c4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.397229] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7803f3-bba1-4617-87ec-64e0ff46a2c9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.399531] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445ea5b3-57cd-45c6-b04a-af132d340423 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.414709] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1023.414709] env[68285]: value = "task-2891805" [ 1023.414709] env[68285]: _type = "Task" [ 1023.414709] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.429934] env[68285]: DEBUG nova.compute.provider_tree [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.440631] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891805, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.698461] env[68285]: DEBUG nova.network.neutron [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance_info_cache with network_info: [{"id": "567381f7-5f78-4920-beb9-db0ef3479244", "address": "fa:16:3e:68:fd:5e", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap567381f7-5f", "ovs_interfaceid": "567381f7-5f78-4920-beb9-db0ef3479244", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.933840] env[68285]: INFO nova.compute.manager [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Took 56.72 seconds to build instance. [ 1023.943248] env[68285]: DEBUG nova.scheduler.client.report [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1023.950587] env[68285]: INFO nova.compute.manager [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Took 59.63 seconds to build instance. [ 1023.959571] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891805, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079769} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.960244] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1023.961855] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c009929a-551d-4a58-b384-32a7e93d8681 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.988179] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] 9e81990d-e63e-48a7-8941-f0298ca184b3/9e81990d-e63e-48a7-8941-f0298ca184b3.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.989450] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47fd536b-62b1-4467-9962-9bf3c08f28a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.012971] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1024.012971] env[68285]: value = "task-2891806" [ 1024.012971] env[68285]: _type = "Task" [ 1024.012971] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.202861] env[68285]: DEBUG oslo_concurrency.lockutils [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Releasing lock "refresh_cache-b3b7f551-81aa-4ac4-9906-020fac5f01f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.202861] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1024.202861] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92265d12-e20f-4be9-8d77-95a7cc16d583 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.210634] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 1024.210634] env[68285]: value = "task-2891807" [ 1024.210634] env[68285]: _type = "Task" [ 1024.210634] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.222687] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891807, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.448897] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.956s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.451616] env[68285]: DEBUG oslo_concurrency.lockutils [None req-95749f81-2206-4edb-9694-623d3a80e1cc tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Lock "2a9b3b56-8607-4da8-9186-8a933cfe0351" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.246s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.452994] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.907s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.452994] env[68285]: DEBUG nova.objects.instance [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Lazy-loading 'resources' on Instance uuid 87582063-50f9-4518-ad2d-915c9cd49b19 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1024.457273] env[68285]: DEBUG oslo_concurrency.lockutils [None req-534a629a-b83b-482e-beb3-19da14f2c2c0 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Lock "1dce61a2-0fe2-4384-835c-7e324446d7cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.945s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.484490] env[68285]: INFO nova.scheduler.client.report [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Deleted allocations for instance b0f32ce2-92fd-4290-a2f4-e5658f775f4f [ 1024.525855] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891806, 'name': ReconfigVM_Task, 'duration_secs': 0.481146} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.525855] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Reconfigured VM instance instance-0000003e to attach disk [datastore2] 9e81990d-e63e-48a7-8941-f0298ca184b3/9e81990d-e63e-48a7-8941-f0298ca184b3.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1024.526873] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c260e2fc-6056-4431-86c8-5bdb57f11f79 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.537203] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1024.537203] env[68285]: value = "task-2891808" [ 1024.537203] env[68285]: _type = "Task" [ 1024.537203] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.542975] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891808, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.729321] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891807, 'name': PowerOffVM_Task, 'duration_secs': 0.323795} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.731046] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1024.731046] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:52:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='5d08af6e-040e-4fac-974b-cfa5ed6c710a',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1109178364',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1024.731046] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1024.731046] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1024.731312] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1024.731312] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1024.731464] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1024.731691] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1024.731885] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 
tempest-MigrationsAdminTest-1593159837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1024.732757] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1024.733014] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1024.733289] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1024.739623] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7ae06f3-5844-4f41-9b6e-23cad5e9c08e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.759513] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 1024.759513] env[68285]: value = "task-2891809" [ 1024.759513] env[68285]: _type = "Task" [ 1024.759513] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.772764] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891809, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.865267] env[68285]: INFO nova.compute.manager [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Rebuilding instance [ 1024.915756] env[68285]: DEBUG nova.compute.manager [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1024.916651] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d804e8e9-b662-44ec-9179-8e63a7c201df {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.933978] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Acquiring lock "1dce61a2-0fe2-4384-835c-7e324446d7cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.934396] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Lock "1dce61a2-0fe2-4384-835c-7e324446d7cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.934734] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Acquiring lock "1dce61a2-0fe2-4384-835c-7e324446d7cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.934994] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Lock "1dce61a2-0fe2-4384-835c-7e324446d7cc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.935287] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Lock "1dce61a2-0fe2-4384-835c-7e324446d7cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.938955] env[68285]: INFO nova.compute.manager [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Terminating instance [ 1024.994698] 
env[68285]: DEBUG oslo_concurrency.lockutils [None req-4408a96f-7296-4147-a38f-df8368d8903c tempest-ListServerFiltersTestJSON-1463485235 tempest-ListServerFiltersTestJSON-1463485235-project-member] Lock "b0f32ce2-92fd-4290-a2f4-e5658f775f4f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.661s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.049708] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891808, 'name': Rename_Task, 'duration_secs': 0.162741} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.049708] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1025.049708] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad030503-2f76-4339-82db-595fdd6e6c54 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.055964] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1025.055964] env[68285]: value = "task-2891810" [ 1025.055964] env[68285]: _type = "Task" [ 1025.055964] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.068702] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891810, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.281188] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891809, 'name': ReconfigVM_Task, 'duration_secs': 0.238824} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.282674] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e6fdb3-8133-4b6d-a2a4-a6131517a199 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.304459] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:52:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='5d08af6e-040e-4fac-974b-cfa5ed6c710a',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1109178364',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1025.304954] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1025.305234] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1025.308018] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1025.308018] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1025.308018] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1025.308018] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1025.308018] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1025.308018] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1025.308018] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1025.308018] env[68285]: DEBUG nova.virt.hardware [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1025.311026] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60b74c89-967b-4209-b52c-8ae5a2d5cd24 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.320967] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 1025.320967] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5222322b-3cd6-8d51-80a9-a9f99f0a761a" [ 1025.320967] env[68285]: _type = "Task" [ 1025.320967] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.330636] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5222322b-3cd6-8d51-80a9-a9f99f0a761a, 'name': SearchDatastore_Task, 'duration_secs': 0.010293} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.336083] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Reconfiguring VM instance instance-0000002b to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1025.339080] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd1ac237-3095-406c-8e5f-df72b77b8c0c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.358215] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 1025.358215] env[68285]: value = "task-2891811" [ 1025.358215] env[68285]: _type = "Task" [ 1025.358215] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.373315] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891811, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.449282] env[68285]: DEBUG nova.compute.manager [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1025.449551] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1025.453945] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fed0c9c-c2a2-481a-8034-b12905563744 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.460015] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1025.460478] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-080e8751-4ead-4193-9b1e-329cdaed171b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.468217] env[68285]: DEBUG oslo_vmware.api [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Waiting for the task: (returnval){ [ 1025.468217] env[68285]: value = "task-2891812" [ 1025.468217] env[68285]: _type = "Task" [ 1025.468217] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.477154] env[68285]: DEBUG oslo_vmware.api [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891812, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.572108] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891810, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.579910] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d760c43f-e3a2-4a5b-b3a6-cbfa98f23c69 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.590303] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f1f22b-e035-424e-a492-49258d90d789 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.645038] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bda75b3-7671-4919-acde-6be797634f1f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.656890] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12baa5a6-3512-4f42-a70d-4b8bbc82738a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.680988] env[68285]: DEBUG nova.compute.provider_tree [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1025.868694] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891811, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.932657] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1025.933090] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88dc7ae1-a175-44bd-86b1-a1be50e49df8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.943905] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1025.943905] env[68285]: value = "task-2891813" [ 1025.943905] env[68285]: _type = "Task" [ 1025.943905] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.953371] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891813, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.977878] env[68285]: DEBUG oslo_vmware.api [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891812, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.073195] env[68285]: DEBUG oslo_vmware.api [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891810, 'name': PowerOnVM_Task, 'duration_secs': 0.743418} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.073195] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1026.073195] env[68285]: DEBUG nova.compute.manager [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1026.073734] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b93211-2fa0-46c3-9af4-89b3ad94dc48 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.186331] env[68285]: DEBUG nova.scheduler.client.report [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1026.371197] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891811, 'name': ReconfigVM_Task, 'duration_secs': 0.831937} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.372074] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Reconfigured VM instance instance-0000002b to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1026.373103] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e378ed6a-1154-49db-9673-a7ff6c77b9f1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.401024] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] b3b7f551-81aa-4ac4-9906-020fac5f01f7/b3b7f551-81aa-4ac4-9906-020fac5f01f7.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1026.401024] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ad9030f-2bf6-4d13-90a4-a5595080c4e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.418587] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 1026.418587] env[68285]: value = "task-2891814" [ 1026.418587] env[68285]: _type = "Task" [ 1026.418587] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.431326] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891814, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.453906] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891813, 'name': PowerOffVM_Task, 'duration_secs': 0.148294} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.454254] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1026.454440] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1026.455278] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53284813-eff5-4b6b-9cff-560ff6e84418 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.464050] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1026.464397] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c070e9f-f272-4dfe-92ce-1b791332fe34 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.479685] env[68285]: DEBUG oslo_vmware.api [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891812, 'name': PowerOffVM_Task, 'duration_secs': 0.725941} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.479945] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1026.480122] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1026.480416] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53a545e8-2ee4-4759-8fb3-a8cbbda0c4c0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.505854] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1026.505854] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1026.505854] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Deleting the datastore file [datastore2] 2a9b3b56-8607-4da8-9186-8a933cfe0351 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.505854] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7317130c-3367-4d08-83bf-f98fc0cbfdb5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.514247] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1026.514247] env[68285]: value = "task-2891817" [ 1026.514247] env[68285]: _type = "Task" [ 1026.514247] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.522471] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891817, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.555457] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1026.555457] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1026.555457] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Deleting the datastore file [datastore2] 1dce61a2-0fe2-4384-835c-7e324446d7cc {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.555666] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f38e4c3-4ef8-43bb-8fe5-668e816f69d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.563160] env[68285]: DEBUG oslo_vmware.api [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Waiting for the task: (returnval){ [ 1026.563160] env[68285]: value = "task-2891818" [ 1026.563160] env[68285]: _type = "Task" [ 1026.563160] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.575125] env[68285]: DEBUG oslo_vmware.api [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891818, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.591855] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.697021] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.241s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.697021] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.923s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.697021] env[68285]: DEBUG nova.objects.instance [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lazy-loading 'resources' on Instance uuid c690490f-9278-4595-8286-d4fd970bbc39 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1026.732179] env[68285]: INFO nova.scheduler.client.report [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Deleted allocations for instance 87582063-50f9-4518-ad2d-915c9cd49b19 [ 1026.928934] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891814, 'name': ReconfigVM_Task, 'duration_secs': 0.461813} completed successfully. 
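
The lockutils entries above record how long each caller waited for and then held the named "compute_resources" lock (for example, waited 50.923s, held 2.241s). A simplified stand-in that produces the same kind of wait/hold accounting is sketched below; it is not the oslo.concurrency implementation, just the shape of it.

import threading
import time
from contextlib import contextmanager

_named_locks = {}

@contextmanager
def timed_lock(name, owner):
    # Serialize on a process-local named lock and report wait/hold times,
    # mirroring the "acquired :: waited" / "released :: held" log lines.
    lock = _named_locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {time.monotonic() - acquired:.3f}s')
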
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.929355] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Reconfigured VM instance instance-0000002b to attach disk [datastore1] b3b7f551-81aa-4ac4-9906-020fac5f01f7/b3b7f551-81aa-4ac4-9906-020fac5f01f7.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1026.930204] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea99acb-bec2-4b8d-8146-a3b0fa3f130c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.948190] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cca0e8f-6e3a-4980-a12b-f9153fb18132 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.965715] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ebdcbc-912b-45f8-b6ef-b4ebef87dd83 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.985186] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b1e205-b66a-400d-b104-46cb387d7ff0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.991819] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1026.992071] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7437c1e0-1390-4955-90b6-22321bcaa113 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.998712] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 1026.998712] env[68285]: value = "task-2891819" [ 1026.998712] env[68285]: _type = "Task" [ 1026.998712] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.008206] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891819, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.022930] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891817, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205708} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.023190] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.023364] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1027.023541] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1027.073739] env[68285]: DEBUG oslo_vmware.api [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Task: {'id': task-2891818, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276001} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.073995] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.074188] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1027.074417] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1027.074546] env[68285]: INFO nova.compute.manager [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1027.074797] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
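
The two instances being deleted above go through the same teardown order each time: power the VM off, unregister it, delete its files from the datastore, then deallocate networking. Condensed into a sketch, with every helper name a placeholder rather than Nova's actual call chain:

def destroy_instance(vm, datastore, network, instance_uuid):
    # Order mirrored from the log: hypervisor state first, then storage,
    # then networking; each step corresponds to one vCenter task above.
    vm.power_off()                                  # PowerOffVM_Task
    vm.unregister()                                 # UnregisterVM
    datastore.delete_path(instance_uuid)            # DeleteDatastoreFile_Task
    network.deallocate_for_instance(instance_uuid)  # Neutron port cleanup
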
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1027.075037] env[68285]: DEBUG nova.compute.manager [-] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1027.075142] env[68285]: DEBUG nova.network.neutron [-] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1027.249427] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6db50cc1-efb4-47ca-b8ed-25f44cd2cf65 tempest-AttachInterfacesUnderV243Test-160219597 tempest-AttachInterfacesUnderV243Test-160219597-project-member] Lock "87582063-50f9-4518-ad2d-915c9cd49b19" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.572s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.290940] env[68285]: DEBUG oslo_concurrency.lockutils [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquiring lock "9e81990d-e63e-48a7-8941-f0298ca184b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.291526] env[68285]: DEBUG oslo_concurrency.lockutils [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Lock "9e81990d-e63e-48a7-8941-f0298ca184b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.291526] env[68285]: DEBUG oslo_concurrency.lockutils [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquiring lock "9e81990d-e63e-48a7-8941-f0298ca184b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.291665] env[68285]: DEBUG oslo_concurrency.lockutils [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Lock "9e81990d-e63e-48a7-8941-f0298ca184b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.291866] env[68285]: DEBUG oslo_concurrency.lockutils [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Lock "9e81990d-e63e-48a7-8941-f0298ca184b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.296103] env[68285]: INFO nova.compute.manager [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 
tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Terminating instance [ 1027.510592] env[68285]: DEBUG oslo_vmware.api [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891819, 'name': PowerOnVM_Task, 'duration_secs': 0.416224} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.511250] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1027.712020] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a054b585-8f8c-4dab-9e5a-e772f01c67ac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.718918] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053b0852-2421-481b-affa-cb29035e261f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.760209] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3384191d-3a68-493b-8112-27b7a3832d0a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.768564] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29659104-ce68-4d0f-af2e-72b47b84c9e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.784162] env[68285]: DEBUG nova.compute.provider_tree [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1027.801678] env[68285]: DEBUG oslo_concurrency.lockutils [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquiring lock "refresh_cache-9e81990d-e63e-48a7-8941-f0298ca184b3" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.801990] env[68285]: DEBUG oslo_concurrency.lockutils [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquired lock "refresh_cache-9e81990d-e63e-48a7-8941-f0298ca184b3" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.802198] env[68285]: DEBUG nova.network.neutron [None 
req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1027.853293] env[68285]: DEBUG nova.compute.manager [req-b9f643dd-f78d-4820-a432-e03a62e72579 req-3a23ce30-c07e-472f-8262-61b715bdba6e service nova] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Received event network-vif-deleted-bb6fa717-2a9e-4638-a045-be1c5301c96b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1027.853842] env[68285]: INFO nova.compute.manager [req-b9f643dd-f78d-4820-a432-e03a62e72579 req-3a23ce30-c07e-472f-8262-61b715bdba6e service nova] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Neutron deleted interface bb6fa717-2a9e-4638-a045-be1c5301c96b; detaching it from the instance and deleting it from the info cache [ 1027.854147] env[68285]: DEBUG nova.network.neutron [req-b9f643dd-f78d-4820-a432-e03a62e72579 req-3a23ce30-c07e-472f-8262-61b715bdba6e service nova] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.859569] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquiring lock "bb806297-47c6-45b7-a177-f3300fa1e29a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.859868] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Lock "bb806297-47c6-45b7-a177-f3300fa1e29a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.989438] env[68285]: DEBUG nova.network.neutron [-] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.063218] env[68285]: DEBUG nova.virt.hardware [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1028.063218] env[68285]: DEBUG nova.virt.hardware [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1028.063218] env[68285]: DEBUG nova.virt.hardware [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1028.064434] env[68285]: DEBUG nova.virt.hardware [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1028.064741] env[68285]: DEBUG nova.virt.hardware [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1028.064992] env[68285]: DEBUG nova.virt.hardware [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1028.065332] env[68285]: DEBUG nova.virt.hardware [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1028.065622] env[68285]: DEBUG nova.virt.hardware [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1028.065888] env[68285]: DEBUG nova.virt.hardware [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1028.066190] env[68285]: DEBUG nova.virt.hardware [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1028.066611] env[68285]: DEBUG nova.virt.hardware [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1028.068016] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d6520b-1f41-4ae2-bc1d-c1c25aa14747 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.077955] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd190a5-dbbd-4681-b839-db17a0d8a8d3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.096178] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1028.101123] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1028.101568] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1028.101906] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f132dadf-4bf6-4186-bee1-afb555a280a3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.121556] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1028.121556] env[68285]: value = "task-2891820" [ 1028.121556] env[68285]: _type = "Task" [ 1028.121556] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.129653] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891820, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.137990] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5274c8f6-c9f8-cb5d-1aab-782466b88f43/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1028.138880] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2237366-606e-4701-91ee-638a9a2bea4b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.168272] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5274c8f6-c9f8-cb5d-1aab-782466b88f43/disk-0.vmdk is in state: ready. 
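
The nova.virt.hardware entries above show the driver deriving CPU topologies for a 1-vCPU flavor with no explicit flavor or image limits, so each dimension falls back to 65536 and the only result is (sockets=1, cores=1, threads=1). A rough sketch of that enumeration step, simplified from what the log describes (real Nova also weighs flavor/image preferences when sorting the candidates):

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) splits whose product equals the
    # vCPU count, bounded by the per-dimension maxima from flavor/image.
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            threads, remainder = divmod(vcpus, sockets * cores)
            if remainder == 0 and 1 <= threads <= max_threads:
                found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))  # [(1, 1, 1)], the single topology logged above
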
{{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1028.168272] env[68285]: ERROR oslo_vmware.rw_handles [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5274c8f6-c9f8-cb5d-1aab-782466b88f43/disk-0.vmdk due to incomplete transfer. [ 1028.168272] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7efddc5e-43d3-4da2-951c-64211c3ebf97 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.169588] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5274c8f6-c9f8-cb5d-1aab-782466b88f43/disk-0.vmdk. {{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1028.169839] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Uploaded image 7c47a7cd-77c4-4c1b-9860-d6c69cedb919 to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1028.173032] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1028.173353] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-36ad50a4-67c3-4271-8dbf-9c1e204a6ba9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.180565] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1028.180565] env[68285]: value = "task-2891821" [ 1028.180565] env[68285]: _type = "Task" [ 1028.180565] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.201530] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891821, 'name': Destroy_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.311678] env[68285]: ERROR nova.scheduler.client.report [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [req-80f19748-2023-4c51-a5b0-9beacd9880e3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-80f19748-2023-4c51-a5b0-9beacd9880e3"}]} [ 1028.326147] env[68285]: DEBUG nova.network.neutron [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1028.338367] env[68285]: DEBUG nova.scheduler.client.report [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1028.359285] env[68285]: DEBUG nova.scheduler.client.report [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1028.359672] env[68285]: DEBUG nova.compute.provider_tree [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1028.365070] env[68285]: DEBUG nova.compute.manager [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 
tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1028.371401] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1746b5b2-af0a-4172-849f-ad9248d37c8c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.383824] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c79781-7748-4ffb-9a70-ff8009851100 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.399038] env[68285]: DEBUG nova.scheduler.client.report [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1028.402077] env[68285]: DEBUG nova.network.neutron [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.425716] env[68285]: DEBUG nova.compute.manager [req-b9f643dd-f78d-4820-a432-e03a62e72579 req-3a23ce30-c07e-472f-8262-61b715bdba6e service nova] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Detach interface failed, port_id=bb6fa717-2a9e-4638-a045-be1c5301c96b, reason: Instance 1dce61a2-0fe2-4384-835c-7e324446d7cc could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1028.427114] env[68285]: DEBUG nova.scheduler.client.report [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1028.492504] env[68285]: INFO nova.compute.manager [-] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Took 1.42 seconds to deallocate network for instance. [ 1028.526917] env[68285]: INFO nova.compute.manager [None req-982cc1b4-f342-487a-939a-21e92ac41140 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance to original state: 'active' [ 1028.639667] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891820, 'name': CreateVM_Task, 'duration_secs': 0.40606} completed successfully. 
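
The ERROR a few entries above is the Placement service rejecting an inventory update with 409 placement.concurrent_update because the resource-provider generation sent with the request had gone stale; the "Refreshing inventories ..." lines that follow are the report client re-reading the provider before retrying. A rough sketch of that optimistic-concurrency loop over the Placement HTTP API is below; the URL shapes and field names are written as I recall the API and should be treated as assumptions to verify.

import requests

def put_inventory_with_retry(session: requests.Session, base_url: str,
                             provider_uuid: str, inventories: dict,
                             max_attempts: int = 3) -> dict:
    # The provider "generation" is an optimistic-concurrency token: read it,
    # send it back with the PUT, and on a 409 re-read and try again.
    provider_url = f"{base_url}/resource_providers/{provider_uuid}"
    for _ in range(max_attempts):
        generation = session.get(provider_url).json()["generation"]
        resp = session.put(
            f"{provider_url}/inventories",
            json={"resource_provider_generation": generation,
                  "inventories": inventories},
        )
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409: another writer bumped the generation first; loop and retry.
    raise RuntimeError(f"inventory update for {provider_uuid} kept conflicting")
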
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.639667] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1028.639667] env[68285]: DEBUG oslo_concurrency.lockutils [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.639667] env[68285]: DEBUG oslo_concurrency.lockutils [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.639667] env[68285]: DEBUG oslo_concurrency.lockutils [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1028.639667] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-756ecd9a-3588-443d-9eb8-da9ba1f8d41a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.648633] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1028.648633] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ec3c43-5ba4-38f0-d083-d1f121140785" [ 1028.648633] env[68285]: _type = "Task" [ 1028.648633] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.656661] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ec3c43-5ba4-38f0-d083-d1f121140785, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.692759] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891821, 'name': Destroy_Task, 'duration_secs': 0.465536} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.693035] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Destroyed the VM [ 1028.693253] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1028.693497] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b4090ac6-d4c0-4ccf-b64c-4f1894331f84 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.700834] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1028.700834] env[68285]: value = "task-2891822" [ 1028.700834] env[68285]: _type = "Task" [ 1028.700834] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.708210] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891822, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.886181] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c5474a-ba0d-47c3-840f-e2f5b5a486c6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.895754] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.897064] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb7327b-135f-4564-a439-0a5523bbb7c2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.931752] env[68285]: DEBUG oslo_concurrency.lockutils [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Releasing lock "refresh_cache-9e81990d-e63e-48a7-8941-f0298ca184b3" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.932181] env[68285]: DEBUG nova.compute.manager [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1028.932406] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1028.933631] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a304e7-51da-411f-81b6-8ea2b9b8ef3e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.936889] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ddd601-948a-49be-952f-5ae37150e7e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.947507] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16f375a-b55c-4a8c-a404-96043e443e8d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.951707] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1028.952010] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-156e7b30-1c36-40cd-aafc-51fa5439cddd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.964879] env[68285]: DEBUG nova.compute.provider_tree [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1028.970988] env[68285]: DEBUG oslo_vmware.api [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1028.970988] env[68285]: value = "task-2891823" [ 1028.970988] env[68285]: _type = "Task" [ 1028.970988] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.980423] env[68285]: DEBUG oslo_vmware.api [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891823, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.004865] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.160784] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ec3c43-5ba4-38f0-d083-d1f121140785, 'name': SearchDatastore_Task, 'duration_secs': 0.032753} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.161151] env[68285]: DEBUG oslo_concurrency.lockutils [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.161394] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1029.161631] env[68285]: DEBUG oslo_concurrency.lockutils [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.161776] env[68285]: DEBUG oslo_concurrency.lockutils [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.161956] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1029.162280] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b93dd37e-fac1-49d1-8b9e-c961f7d36177 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.172595] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1029.172787] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1029.173554] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ed365ef-daf0-4d06-8272-322b63298e4b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.180021] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1029.180021] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]528cfad0-c3f7-4a52-26e6-8d25edc2195e" [ 1029.180021] env[68285]: _type = "Task" [ 1029.180021] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.189076] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528cfad0-c3f7-4a52-26e6-8d25edc2195e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.210336] env[68285]: DEBUG oslo_vmware.api [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891822, 'name': RemoveSnapshot_Task, 'duration_secs': 0.509128} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.210997] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1029.211460] env[68285]: INFO nova.compute.manager [None req-5808e500-611e-4fe3-93b8-0ff349e6447c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Took 13.14 seconds to snapshot the instance on the hypervisor. [ 1029.487058] env[68285]: DEBUG oslo_vmware.api [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891823, 'name': PowerOffVM_Task, 'duration_secs': 0.124256} completed successfully. 
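
The devstack-image-cache_base activity above (per-image lock, SearchDatastore_Task, MakeDirectory) is the check-the-cache-before-fetching step of _fetch_image_if_missing: serialize on the image id, look for the cached VMDK, and only fetch and create directories on a miss. The shape of that step, with every name below a hypothetical placeholder:

def ensure_image_cached(cache, image_id, fetch_from_glance):
    # Serialize per image id so concurrent builds don't race on the cache
    # directory, then fetch only if the cached VMDK is not already present.
    with cache.lock(image_id):                 # per-image named lock
        if not cache.exists(image_id):         # SearchDatastore_Task analogue
            cache.makedirs(image_id)           # MakeDirectory analogue
            fetch_from_glance(image_id, cache.path_for(image_id))
    return cache.path_for(image_id)
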
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.487319] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1029.487489] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1029.487740] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26eadc13-1f45-4339-8814-ea416669f639 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.511067] env[68285]: DEBUG nova.scheduler.client.report [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 93 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1029.511266] env[68285]: DEBUG nova.compute.provider_tree [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 93 to 94 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1029.511449] env[68285]: DEBUG nova.compute.provider_tree [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1029.516610] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1029.516753] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-29500154-a3c1-467f-a792-702f8324b91c 
tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1029.516935] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Deleting the datastore file [datastore2] 9e81990d-e63e-48a7-8941-f0298ca184b3 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1029.517576] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69ebf871-3e3b-4fba-b6eb-1295276747b7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.525981] env[68285]: DEBUG oslo_vmware.api [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for the task: (returnval){ [ 1029.525981] env[68285]: value = "task-2891825" [ 1029.525981] env[68285]: _type = "Task" [ 1029.525981] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.534392] env[68285]: DEBUG oslo_vmware.api [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891825, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.691070] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528cfad0-c3f7-4a52-26e6-8d25edc2195e, 'name': SearchDatastore_Task, 'duration_secs': 0.023573} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.691818] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6daae79-7f3c-49d1-a29a-b87d039be76f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.697354] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1029.697354] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523a65f7-5005-0c8d-48d4-4c860712a310" [ 1029.697354] env[68285]: _type = "Task" [ 1029.697354] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.704096] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523a65f7-5005-0c8d-48d4-4c860712a310, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.978587] env[68285]: DEBUG oslo_concurrency.lockutils [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "3c71f649-b456-45a0-a113-725a529702a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.978587] env[68285]: DEBUG oslo_concurrency.lockutils [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "3c71f649-b456-45a0-a113-725a529702a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.978587] env[68285]: DEBUG oslo_concurrency.lockutils [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "3c71f649-b456-45a0-a113-725a529702a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.978587] env[68285]: DEBUG oslo_concurrency.lockutils [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "3c71f649-b456-45a0-a113-725a529702a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.978587] env[68285]: DEBUG oslo_concurrency.lockutils [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "3c71f649-b456-45a0-a113-725a529702a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.982083] env[68285]: INFO nova.compute.manager [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Terminating instance [ 1030.018908] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.322s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.022743] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.379s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.022743] env[68285]: DEBUG nova.objects.instance [None 
req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Lazy-loading 'resources' on Instance uuid e28d0927-17c2-4256-93d4-ef0cc2c9b92a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.038596] env[68285]: DEBUG oslo_vmware.api [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Task: {'id': task-2891825, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122693} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.040394] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1030.040898] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1030.041436] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1030.041614] env[68285]: INFO nova.compute.manager [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1030.042803] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1030.042803] env[68285]: DEBUG nova.compute.manager [-] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1030.042803] env[68285]: DEBUG nova.network.neutron [-] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1030.045623] env[68285]: INFO nova.scheduler.client.report [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleted allocations for instance c690490f-9278-4595-8286-d4fd970bbc39 [ 1030.066243] env[68285]: DEBUG nova.network.neutron [-] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1030.208962] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523a65f7-5005-0c8d-48d4-4c860712a310, 'name': SearchDatastore_Task, 'duration_secs': 0.010049} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.209344] env[68285]: DEBUG oslo_concurrency.lockutils [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.209698] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 2a9b3b56-8607-4da8-9186-8a933cfe0351/2a9b3b56-8607-4da8-9186-8a933cfe0351.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1030.210047] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f146749b-3c22-4dc6-a309-a7d72d6c7a01 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.217297] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1030.217297] env[68285]: value = "task-2891826" [ 1030.217297] env[68285]: _type = "Task" [ 1030.217297] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.237412] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891826, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.488314] env[68285]: DEBUG nova.compute.manager [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1030.488314] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1030.489418] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1942275c-ea82-42a5-be6d-699b72f00af1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.496987] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "d0b04097-292a-47e7-8f14-199b1650dc2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.497320] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "d0b04097-292a-47e7-8f14-199b1650dc2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.501443] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1030.501744] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0696114-5a82-4076-9d5a-e01d433bba0f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.554228] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1fdd5c99-cce3-4877-a974-30ef564f9a0d tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "c690490f-9278-4595-8286-d4fd970bbc39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 57.674s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.570190] env[68285]: DEBUG nova.network.neutron [-] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.572029] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1030.572029] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Deleting contents of the VM from datastore 
datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1030.572029] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleting the datastore file [datastore2] 3c71f649-b456-45a0-a113-725a529702a2 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1030.576036] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44b37c38-59c9-4236-83d3-4cda7d28d872 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.584038] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.584038] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.584222] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.584641] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.584796] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.587301] env[68285]: DEBUG oslo_vmware.api [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1030.587301] env[68285]: value = "task-2891828" [ 1030.587301] env[68285]: _type = "Task" [ 1030.587301] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.587901] env[68285]: INFO nova.compute.manager [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Terminating instance [ 1030.608708] env[68285]: DEBUG oslo_vmware.api [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891828, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.730112] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891826, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.959276] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdc5333-53a3-4bf7-8979-609fac418c1f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.966981] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a5ed3c-ebc8-4a88-bbd0-eb02b0655a89 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.998650] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d72a2e5-c43b-4fb9-a645-14e0e9419c7c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.002029] env[68285]: DEBUG nova.compute.manager [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1031.008638] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d52c72-070f-4ee5-ac95-8a693669b467 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.022229] env[68285]: DEBUG nova.compute.provider_tree [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.073636] env[68285]: INFO nova.compute.manager [-] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Took 1.03 seconds to deallocate network for instance. [ 1031.099346] env[68285]: DEBUG oslo_vmware.api [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891828, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.453812} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.099512] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1031.099961] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1031.099961] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1031.100066] env[68285]: INFO nova.compute.manager [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1031.100263] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1031.100441] env[68285]: DEBUG nova.compute.manager [-] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1031.100529] env[68285]: DEBUG nova.network.neutron [-] [instance: 3c71f649-b456-45a0-a113-725a529702a2] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1031.104109] env[68285]: DEBUG nova.compute.manager [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1031.104109] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1031.104109] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feabd5f2-b71f-4980-a5e1-5a86fd65ae89 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.112968] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1031.112968] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17917117-f3b3-467b-9db2-f3d47de94a45 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.118489] env[68285]: DEBUG oslo_vmware.api [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 1031.118489] env[68285]: value = "task-2891829" [ 1031.118489] env[68285]: _type = "Task" [ 1031.118489] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.125237] env[68285]: DEBUG oslo_vmware.api [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891829, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.228424] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891826, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695624} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.228704] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 2a9b3b56-8607-4da8-9186-8a933cfe0351/2a9b3b56-8607-4da8-9186-8a933cfe0351.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1031.228939] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1031.229392] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c73ab99-604b-4baf-bdbb-e7bc029a3a30 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.235601] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1031.235601] env[68285]: value = "task-2891830" [ 1031.235601] env[68285]: _type = "Task" [ 1031.235601] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.247369] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891830, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.526032] env[68285]: DEBUG nova.scheduler.client.report [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1031.530713] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.581120] env[68285]: DEBUG oslo_concurrency.lockutils [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.626847] env[68285]: DEBUG oslo_vmware.api [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891829, 'name': PowerOffVM_Task, 'duration_secs': 0.202434} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.627186] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1031.627423] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1031.627750] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-45634700-7ee9-4bd8-88d7-cc02c300f965 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.662529] env[68285]: DEBUG nova.compute.manager [req-dd1a509d-93c7-406d-a9eb-54071b336839 req-96233d73-f76e-4c5f-a2b0-5e0d9522c7c5 service nova] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Received event network-vif-deleted-4ab469f4-9da1-4748-ab22-7f86098988de {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1031.662809] env[68285]: INFO nova.compute.manager [req-dd1a509d-93c7-406d-a9eb-54071b336839 req-96233d73-f76e-4c5f-a2b0-5e0d9522c7c5 service nova] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Neutron deleted interface 4ab469f4-9da1-4748-ab22-7f86098988de; detaching it from the instance and deleting it from the info cache [ 1031.663109] env[68285]: DEBUG nova.network.neutron [req-dd1a509d-93c7-406d-a9eb-54071b336839 req-96233d73-f76e-4c5f-a2b0-5e0d9522c7c5 service nova] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.696074] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1031.696505] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1031.696779] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Deleting the datastore file [datastore1] b3b7f551-81aa-4ac4-9906-020fac5f01f7 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1031.698029] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ccce0597-6641-4e6e-a97c-26ead4e03a13 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.705251] env[68285]: DEBUG oslo_vmware.api [None 
req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 1031.705251] env[68285]: value = "task-2891832" [ 1031.705251] env[68285]: _type = "Task" [ 1031.705251] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.716021] env[68285]: DEBUG oslo_vmware.api [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891832, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.746549] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891830, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075983} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.746888] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1031.748150] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b2c463-ca4c-4554-b07f-004382c92436 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.769492] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 2a9b3b56-8607-4da8-9186-8a933cfe0351/2a9b3b56-8607-4da8-9186-8a933cfe0351.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1031.769960] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89fd9484-a9a9-45e4-a685-993756709659 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.789621] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1031.789621] env[68285]: value = "task-2891833" [ 1031.789621] env[68285]: _type = "Task" [ 1031.789621] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.798116] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891833, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.020267] env[68285]: DEBUG nova.network.neutron [-] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.033518] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.011s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.036340] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 51.042s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.038601] env[68285]: INFO nova.compute.claims [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.059947] env[68285]: INFO nova.scheduler.client.report [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Deleted allocations for instance e28d0927-17c2-4256-93d4-ef0cc2c9b92a [ 1032.165968] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e273e40b-2456-4a5b-84fc-72109fdeae3a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.175795] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec76249-2456-481f-b0f7-6ad22bd7aaac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.211291] env[68285]: DEBUG nova.compute.manager [req-dd1a509d-93c7-406d-a9eb-54071b336839 req-96233d73-f76e-4c5f-a2b0-5e0d9522c7c5 service nova] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Detach interface failed, port_id=4ab469f4-9da1-4748-ab22-7f86098988de, reason: Instance 3c71f649-b456-45a0-a113-725a529702a2 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1032.224741] env[68285]: DEBUG oslo_vmware.api [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891832, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213272} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.225031] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1032.225227] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1032.225408] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1032.225581] env[68285]: INFO nova.compute.manager [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1032.225837] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1032.226218] env[68285]: DEBUG nova.compute.manager [-] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1032.226277] env[68285]: DEBUG nova.network.neutron [-] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1032.300448] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891833, 'name': ReconfigVM_Task, 'duration_secs': 0.33842} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.300769] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 2a9b3b56-8607-4da8-9186-8a933cfe0351/2a9b3b56-8607-4da8-9186-8a933cfe0351.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1032.301407] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c8895c0a-776c-47bd-8aea-0282b78021e5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.307259] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1032.307259] env[68285]: value = "task-2891834" [ 1032.307259] env[68285]: _type = "Task" [ 1032.307259] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.315649] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891834, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.525185] env[68285]: INFO nova.compute.manager [-] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Took 1.42 seconds to deallocate network for instance. [ 1032.570128] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8b3bd5ea-9565-48c8-bfa8-b6fe5dc60115 tempest-ServersV294TestFqdnHostnames-726038119 tempest-ServersV294TestFqdnHostnames-726038119-project-member] Lock "e28d0927-17c2-4256-93d4-ef0cc2c9b92a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.606s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.821099] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891834, 'name': Rename_Task, 'duration_secs': 0.135852} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.821099] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1032.821099] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17430d3c-dbf0-42c8-a6d5-4658475c57e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.827772] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1032.827772] env[68285]: value = "task-2891835" [ 1032.827772] env[68285]: _type = "Task" [ 1032.827772] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.835945] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891835, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.031886] env[68285]: DEBUG oslo_concurrency.lockutils [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.207095] env[68285]: DEBUG nova.network.neutron [-] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.337852] env[68285]: DEBUG oslo_vmware.api [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891835, 'name': PowerOnVM_Task, 'duration_secs': 0.472522} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.338128] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1033.338354] env[68285]: DEBUG nova.compute.manager [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1033.339191] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038f5dd1-66a8-43c4-a20c-a5b174cd6e33 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.442502] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74468bcb-3409-43b6-855f-5e5e4e236fe2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.450517] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500c3bb9-194b-4d15-84b8-b14d3146cb69 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.482072] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5f6d0c-14f7-4e6b-b45f-26c36b9e702e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.489349] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff0127f-de73-4b2d-8d55-6cf06915a4d6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.503645] env[68285]: DEBUG nova.compute.provider_tree [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.707417] env[68285]: DEBUG nova.compute.manager [req-3d627e6c-2d5f-44d9-b9ac-086d684ee619 req-9c3ed801-b3c0-4256-bbde-009438517bbe service nova] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Received event network-vif-deleted-567381f7-5f78-4920-beb9-db0ef3479244 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1033.710164] env[68285]: INFO nova.compute.manager [-] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Took 1.48 seconds to deallocate network for instance. 
[ 1033.810702] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "9c190abd-23ee-4e8e-8b91-9050847581d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.810941] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "9c190abd-23ee-4e8e-8b91-9050847581d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.857228] env[68285]: DEBUG oslo_concurrency.lockutils [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.006624] env[68285]: DEBUG nova.scheduler.client.report [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1034.218556] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.313360] env[68285]: DEBUG nova.compute.manager [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1034.512317] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.512859] env[68285]: DEBUG nova.compute.manager [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1034.516151] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 51.796s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.516151] env[68285]: DEBUG nova.objects.instance [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1034.587297] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquiring lock "2a9b3b56-8607-4da8-9186-8a933cfe0351" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.587696] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Lock "2a9b3b56-8607-4da8-9186-8a933cfe0351" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.003s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.588008] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquiring lock "2a9b3b56-8607-4da8-9186-8a933cfe0351-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.588320] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Lock "2a9b3b56-8607-4da8-9186-8a933cfe0351-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1034.588569] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Lock "2a9b3b56-8607-4da8-9186-8a933cfe0351-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.591263] env[68285]: INFO nova.compute.manager [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Terminating instance [ 1034.840831] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.025996] env[68285]: DEBUG nova.compute.utils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1035.027968] env[68285]: DEBUG nova.compute.manager [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1035.028233] env[68285]: DEBUG nova.network.neutron [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1035.094810] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquiring lock "refresh_cache-2a9b3b56-8607-4da8-9186-8a933cfe0351" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.094972] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquired lock "refresh_cache-2a9b3b56-8607-4da8-9186-8a933cfe0351" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.095177] env[68285]: DEBUG nova.network.neutron [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1035.111862] env[68285]: DEBUG nova.policy [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Policy check for network:attach_external_network failed with 
credentials {'is_admin': False, 'user_id': '080ca112b7534d1284942bdd41514e66', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '923c0329269c41159ae4469d358fe25f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1035.532468] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7898996f-c6ef-4d8d-ac0e-149165f0e4a4 tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.532468] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 42.098s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.532648] env[68285]: DEBUG nova.objects.instance [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1035.536885] env[68285]: DEBUG nova.compute.manager [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1035.541048] env[68285]: DEBUG nova.network.neutron [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Successfully created port: 70ff5ae8-d9d3-4840-abcd-733345a4c4af {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1035.620711] env[68285]: DEBUG nova.network.neutron [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1035.689790] env[68285]: DEBUG nova.network.neutron [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.192731] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Releasing lock "refresh_cache-2a9b3b56-8607-4da8-9186-8a933cfe0351" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.196021] env[68285]: DEBUG nova.compute.manager [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1036.196021] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.196021] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afae2949-494f-49e6-9a05-d045290896ac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.203986] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.204769] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a73b0a95-100b-429c-8cee-99086f285416 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.211979] env[68285]: DEBUG oslo_vmware.api [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1036.211979] env[68285]: value = "task-2891836" [ 1036.211979] env[68285]: _type = "Task" [ 1036.211979] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.224600] env[68285]: DEBUG oslo_vmware.api [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891836, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.464028] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.464028] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.464028] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.464399] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.464675] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.467095] env[68285]: INFO nova.compute.manager [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Terminating instance [ 1036.550632] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d3e8b76e-b5ce-497a-bb4b-7c1ff976976d tempest-ServersAdmin275Test-1650465497 tempest-ServersAdmin275Test-1650465497-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.552800] env[68285]: DEBUG nova.compute.manager [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1036.555170] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 42.059s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.555170] env[68285]: DEBUG nova.objects.instance [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1036.581066] env[68285]: DEBUG nova.virt.hardware [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1036.581301] env[68285]: DEBUG nova.virt.hardware [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1036.581458] env[68285]: DEBUG nova.virt.hardware [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1036.581638] env[68285]: DEBUG nova.virt.hardware [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1036.581780] env[68285]: DEBUG nova.virt.hardware [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1036.581923] env[68285]: DEBUG nova.virt.hardware [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1036.582137] env[68285]: DEBUG nova.virt.hardware [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1036.582298] env[68285]: DEBUG nova.virt.hardware [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1036.582460] env[68285]: DEBUG nova.virt.hardware [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1036.582618] env[68285]: DEBUG nova.virt.hardware [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1036.582786] env[68285]: DEBUG nova.virt.hardware [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1036.583854] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b559955e-da9c-4989-bfae-bc97fa56a38e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.593284] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2488ea-b7fe-440a-9eb7-279d157497c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.722951] env[68285]: DEBUG oslo_vmware.api [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891836, 'name': PowerOffVM_Task, 'duration_secs': 0.159275} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.722951] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1036.723114] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1036.723277] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b856b7ce-7106-4560-be2c-b72fee7240d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.746388] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1036.746630] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1036.746812] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Deleting the datastore file [datastore1] 2a9b3b56-8607-4da8-9186-8a933cfe0351 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1036.747120] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2fb0ab2c-f6ee-4962-9e09-77c73ff030c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.752628] env[68285]: DEBUG oslo_vmware.api [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for the task: (returnval){ [ 1036.752628] env[68285]: value = "task-2891838" [ 1036.752628] env[68285]: _type = "Task" [ 1036.752628] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.763544] env[68285]: DEBUG oslo_vmware.api [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891838, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.956957] env[68285]: DEBUG nova.compute.manager [req-4c97d2ce-e1ba-4344-bbcb-5a507eb3af17 req-195a52c6-9200-412c-92e0-680fec94cf58 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Received event network-vif-plugged-70ff5ae8-d9d3-4840-abcd-733345a4c4af {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1036.956957] env[68285]: DEBUG oslo_concurrency.lockutils [req-4c97d2ce-e1ba-4344-bbcb-5a507eb3af17 req-195a52c6-9200-412c-92e0-680fec94cf58 service nova] Acquiring lock "1a7d1cfc-67a5-4178-9bc2-eb8af5104d11-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.956957] env[68285]: DEBUG oslo_concurrency.lockutils [req-4c97d2ce-e1ba-4344-bbcb-5a507eb3af17 req-195a52c6-9200-412c-92e0-680fec94cf58 service nova] Lock "1a7d1cfc-67a5-4178-9bc2-eb8af5104d11-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.957135] env[68285]: DEBUG oslo_concurrency.lockutils [req-4c97d2ce-e1ba-4344-bbcb-5a507eb3af17 req-195a52c6-9200-412c-92e0-680fec94cf58 service nova] Lock "1a7d1cfc-67a5-4178-9bc2-eb8af5104d11-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.957417] env[68285]: DEBUG nova.compute.manager [req-4c97d2ce-e1ba-4344-bbcb-5a507eb3af17 req-195a52c6-9200-412c-92e0-680fec94cf58 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] No waiting events found dispatching network-vif-plugged-70ff5ae8-d9d3-4840-abcd-733345a4c4af {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1036.957417] env[68285]: WARNING nova.compute.manager [req-4c97d2ce-e1ba-4344-bbcb-5a507eb3af17 req-195a52c6-9200-412c-92e0-680fec94cf58 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Received unexpected event network-vif-plugged-70ff5ae8-d9d3-4840-abcd-733345a4c4af for instance with vm_state building and task_state spawning. [ 1036.971050] env[68285]: DEBUG nova.compute.manager [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1036.971252] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.972100] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd518cd-cd66-431d-b9e6-85f69802dfd6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.981963] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.982232] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-524a6e45-1897-4a04-bd46-97fca607641f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.988745] env[68285]: DEBUG oslo_vmware.api [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1036.988745] env[68285]: value = "task-2891839" [ 1036.988745] env[68285]: _type = "Task" [ 1036.988745] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.996683] env[68285]: DEBUG oslo_vmware.api [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891839, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.094337] env[68285]: DEBUG nova.network.neutron [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Successfully updated port: 70ff5ae8-d9d3-4840-abcd-733345a4c4af {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1037.273602] env[68285]: DEBUG oslo_vmware.api [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Task: {'id': task-2891838, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115338} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.273897] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.274096] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.274278] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.274450] env[68285]: INFO nova.compute.manager [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1037.274693] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1037.274952] env[68285]: DEBUG nova.compute.manager [-] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1037.275074] env[68285]: DEBUG nova.network.neutron [-] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1037.291534] env[68285]: DEBUG nova.network.neutron [-] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1037.499047] env[68285]: DEBUG oslo_vmware.api [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891839, 'name': PowerOffVM_Task, 'duration_secs': 0.211913} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.499369] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1037.499682] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1037.499808] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33032277-cf54-46ed-a316-4189441c6c36 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.569342] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1037.569342] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1037.569342] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Deleting the datastore file [datastore1] 5e101d74-7a82-4118-8f4c-7af9a6b0917a {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.569342] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e8ff353-289b-491e-83c7-5046198693d6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.574248] env[68285]: DEBUG oslo_vmware.api [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1037.574248] env[68285]: value = "task-2891841" [ 1037.574248] env[68285]: _type = "Task" [ 1037.574248] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.575820] env[68285]: DEBUG oslo_concurrency.lockutils [None req-38827b30-962d-4ce4-8da2-9f46fdeb3c5c tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.021s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.580421] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.804s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.581407] env[68285]: INFO nova.compute.claims [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1037.596374] env[68285]: DEBUG oslo_vmware.api [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891841, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.598095] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "refresh_cache-1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.598095] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired lock "refresh_cache-1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.598095] env[68285]: DEBUG nova.network.neutron [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1037.794485] env[68285]: DEBUG nova.network.neutron [-] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.087496] env[68285]: DEBUG oslo_vmware.api [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2891841, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144213} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.087764] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1038.087946] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1038.088134] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1038.088305] env[68285]: INFO nova.compute.manager [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1038.088580] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1038.088774] env[68285]: DEBUG nova.compute.manager [-] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1038.088872] env[68285]: DEBUG nova.network.neutron [-] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1038.139139] env[68285]: DEBUG nova.network.neutron [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1038.464261] env[68285]: INFO nova.compute.manager [-] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Took 1.02 seconds to deallocate network for instance. 
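The PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task sequences above ("Invoking …", "Waiting for the task", "progress is 0%", "completed successfully") are oslo.vmware starting an asynchronous vSphere task and polling it to completion. A rough sketch of that call pattern, with hypothetical connection details and managed-object reference (not taken from this log):

    # Illustrative only -- host, credentials and the moref value are
    # placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed-object reference for the VM to operate on.
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

    # Start the asynchronous power-off and block until it finishes;
    # wait_for_task() polls the task, producing the periodic
    # 'progress is N%' DEBUG entries, and returns once the task succeeds.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
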
[ 1038.539477] env[68285]: DEBUG nova.network.neutron [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Updating instance_info_cache with network_info: [{"id": "70ff5ae8-d9d3-4840-abcd-733345a4c4af", "address": "fa:16:3e:8a:e6:d8", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70ff5ae8-d9", "ovs_interfaceid": "70ff5ae8-d9d3-4840-abcd-733345a4c4af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.979865] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.042308] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Releasing lock "refresh_cache-1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.042658] env[68285]: DEBUG nova.compute.manager [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Instance network_info: |[{"id": "70ff5ae8-d9d3-4840-abcd-733345a4c4af", "address": "fa:16:3e:8a:e6:d8", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tap70ff5ae8-d9", "ovs_interfaceid": "70ff5ae8-d9d3-4840-abcd-733345a4c4af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1039.043754] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:e6:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70ff5ae8-d9d3-4840-abcd-733345a4c4af', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1039.051369] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1039.051592] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1039.051816] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3754c2b6-d9e3-4a94-a423-109df8b23836 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.067405] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5236350-dcee-4877-a6dc-b6e067fe3687 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.075126] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459250f2-9170-47cc-8108-16dee2ce52a3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.079041] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1039.079041] env[68285]: value = "task-2891842" [ 1039.079041] env[68285]: _type = "Task" [ 1039.079041] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.109662] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcde6f24-ed26-421c-a6c8-aeec41f51f48 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.114851] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891842, 'name': CreateVM_Task} progress is 15%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.120027] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fed3454-03be-4f44-ae38-3b58cec226c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.135090] env[68285]: DEBUG nova.compute.provider_tree [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1039.460022] env[68285]: DEBUG nova.compute.manager [req-c0b8d2e6-e526-4ce5-b194-5615dd455556 req-029ca009-06bc-4ddc-9659-b7780bfbff49 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Received event network-changed-70ff5ae8-d9d3-4840-abcd-733345a4c4af {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1039.460022] env[68285]: DEBUG nova.compute.manager [req-c0b8d2e6-e526-4ce5-b194-5615dd455556 req-029ca009-06bc-4ddc-9659-b7780bfbff49 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Refreshing instance network info cache due to event network-changed-70ff5ae8-d9d3-4840-abcd-733345a4c4af. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1039.460022] env[68285]: DEBUG oslo_concurrency.lockutils [req-c0b8d2e6-e526-4ce5-b194-5615dd455556 req-029ca009-06bc-4ddc-9659-b7780bfbff49 service nova] Acquiring lock "refresh_cache-1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.460022] env[68285]: DEBUG oslo_concurrency.lockutils [req-c0b8d2e6-e526-4ce5-b194-5615dd455556 req-029ca009-06bc-4ddc-9659-b7780bfbff49 service nova] Acquired lock "refresh_cache-1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.460022] env[68285]: DEBUG nova.network.neutron [req-c0b8d2e6-e526-4ce5-b194-5615dd455556 req-029ca009-06bc-4ddc-9659-b7780bfbff49 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Refreshing network info cache for port 70ff5ae8-d9d3-4840-abcd-733345a4c4af {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1039.575652] env[68285]: DEBUG nova.network.neutron [-] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.592516] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891842, 'name': CreateVM_Task, 'duration_secs': 0.386832} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.593267] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1039.593949] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.594133] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.594455] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1039.594937] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-506f6a93-5306-4c1b-a63f-ec43286e84a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.599840] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1039.599840] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c916dc-6eca-58e8-9c89-22cd95b36420" [ 1039.599840] env[68285]: _type = "Task" [ 1039.599840] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.607500] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c916dc-6eca-58e8-9c89-22cd95b36420, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.638394] env[68285]: DEBUG nova.scheduler.client.report [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1039.907997] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.910621] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.080226] env[68285]: INFO nova.compute.manager [-] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Took 1.99 seconds to deallocate network for instance. [ 1040.110688] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c916dc-6eca-58e8-9c89-22cd95b36420, 'name': SearchDatastore_Task, 'duration_secs': 0.0095} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.110688] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.110688] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1040.110688] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.110688] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.110688] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.110688] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-229c4887-c895-4b89-a5aa-338cffb7bd81 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.120206] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.120396] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1040.121405] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20a85aab-46ca-4f54-acff-de690f0172d6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.127919] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1040.127919] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527b2813-34b2-54ca-4013-6772024222f7" [ 1040.127919] env[68285]: _type = "Task" [ 1040.127919] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.135279] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527b2813-34b2-54ca-4013-6772024222f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.145145] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.565s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.145625] env[68285]: DEBUG nova.compute.manager [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1040.148166] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.527s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.150099] env[68285]: INFO nova.compute.claims [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1040.206807] env[68285]: DEBUG nova.network.neutron [req-c0b8d2e6-e526-4ce5-b194-5615dd455556 req-029ca009-06bc-4ddc-9659-b7780bfbff49 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Updated VIF entry in instance network info cache for port 70ff5ae8-d9d3-4840-abcd-733345a4c4af. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1040.207189] env[68285]: DEBUG nova.network.neutron [req-c0b8d2e6-e526-4ce5-b194-5615dd455556 req-029ca009-06bc-4ddc-9659-b7780bfbff49 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Updating instance_info_cache with network_info: [{"id": "70ff5ae8-d9d3-4840-abcd-733345a4c4af", "address": "fa:16:3e:8a:e6:d8", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70ff5ae8-d9", "ovs_interfaceid": "70ff5ae8-d9d3-4840-abcd-733345a4c4af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.412128] env[68285]: DEBUG nova.compute.manager [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1040.586643] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.639600] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527b2813-34b2-54ca-4013-6772024222f7, 'name': SearchDatastore_Task, 'duration_secs': 0.008568} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.640483] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2b62427-5dc0-4b62-bd27-ce2904263501 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.646421] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1040.646421] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52bfd4f4-96a1-92be-0ed7-d59430673ea7" [ 1040.646421] env[68285]: _type = "Task" [ 1040.646421] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.655699] env[68285]: DEBUG nova.compute.utils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1040.658933] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bfd4f4-96a1-92be-0ed7-d59430673ea7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.659474] env[68285]: DEBUG nova.compute.manager [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1040.659663] env[68285]: DEBUG nova.network.neutron [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1040.710606] env[68285]: DEBUG oslo_concurrency.lockutils [req-c0b8d2e6-e526-4ce5-b194-5615dd455556 req-029ca009-06bc-4ddc-9659-b7780bfbff49 service nova] Releasing lock "refresh_cache-1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.712126] env[68285]: DEBUG nova.policy [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '557a46b01bbf41e4a343d20c8206aa96', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9352aafac6e049feb8d74a91d1600224', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1040.940340] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.083071] env[68285]: DEBUG nova.network.neutron [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Successfully created port: 34b304ef-32d9-464e-98cc-8226f6f71037 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1041.158993] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 
tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bfd4f4-96a1-92be-0ed7-d59430673ea7, 'name': SearchDatastore_Task, 'duration_secs': 0.010503} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.158993] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.158993] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11/1a7d1cfc-67a5-4178-9bc2-eb8af5104d11.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1041.158993] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51d2c507-a114-4c82-a674-c56b7a9ac484 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.160607] env[68285]: DEBUG nova.compute.manager [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1041.176284] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1041.176284] env[68285]: value = "task-2891843" [ 1041.176284] env[68285]: _type = "Task" [ 1041.176284] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.187565] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891843, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.510311] env[68285]: DEBUG nova.compute.manager [req-bc459f79-aef9-4783-88ae-5768f09cf8f1 req-08ff780f-e8ab-47dc-9926-18329a7e0384 service nova] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Received event network-vif-deleted-462b7f0c-cb02-4c0c-aad1-3b29d59cf1f1 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1041.573347] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ed5850-be5d-40b4-87d7-1f3d4da1032e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.581399] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354967c1-4ddf-4632-b531-2d0674ed7ae0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.611522] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f24e29d-e1ce-4b44-a16f-67484e110955 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.619963] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28944e39-dde8-4b97-bb36-21757d1ac90b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.633054] env[68285]: DEBUG nova.compute.provider_tree [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.690397] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891843, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.137038] env[68285]: DEBUG nova.scheduler.client.report [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1042.186805] env[68285]: DEBUG nova.compute.manager [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1042.192994] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891843, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.281856] env[68285]: DEBUG nova.virt.hardware [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1042.282125] env[68285]: DEBUG nova.virt.hardware [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1042.282297] env[68285]: DEBUG nova.virt.hardware [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1042.282482] env[68285]: DEBUG nova.virt.hardware [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1042.282645] env[68285]: DEBUG nova.virt.hardware [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1042.282798] env[68285]: DEBUG nova.virt.hardware [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1042.283028] env[68285]: DEBUG nova.virt.hardware [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1042.283193] 
env[68285]: DEBUG nova.virt.hardware [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1042.283373] env[68285]: DEBUG nova.virt.hardware [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1042.283540] env[68285]: DEBUG nova.virt.hardware [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1042.283714] env[68285]: DEBUG nova.virt.hardware [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1042.285009] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc86078-5406-44c7-8bff-0eb07aeff6f9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.292982] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818556c3-d280-4f82-8317-b8fab87a8fff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.642659] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.643197] env[68285]: DEBUG nova.compute.manager [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1042.645726] env[68285]: DEBUG oslo_concurrency.lockutils [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.592s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.645946] env[68285]: DEBUG nova.objects.instance [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lazy-loading 'resources' on Instance uuid 32d23c62-23ec-4732-a95d-6ac32805e1b9 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1042.690764] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891843, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.433767} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.691039] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11/1a7d1cfc-67a5-4178-9bc2-eb8af5104d11.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1042.691258] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1042.691505] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee105f23-be4f-4be0-8aa2-0d09a5a18ec9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.698921] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1042.698921] env[68285]: value = "task-2891844" [ 1042.698921] env[68285]: _type = "Task" [ 1042.698921] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.706832] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891844, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.764014] env[68285]: DEBUG nova.network.neutron [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Successfully updated port: 34b304ef-32d9-464e-98cc-8226f6f71037 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1043.149539] env[68285]: DEBUG nova.compute.utils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1043.150953] env[68285]: DEBUG nova.compute.manager [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1043.151139] env[68285]: DEBUG nova.network.neutron [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1043.186509] env[68285]: DEBUG nova.policy [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64faebf5ce1549fe938f12248656d8d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2727048b316143c7bfa2aef4f9b264f2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1043.209559] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891844, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066948} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.212306] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1043.213426] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f8d8b39-3a9b-433a-904b-481fe2b2a762 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.238265] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11/1a7d1cfc-67a5-4178-9bc2-eb8af5104d11.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1043.241084] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aede55d0-a99a-413d-a0a0-d3b91eab0a40 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.262039] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1043.262039] env[68285]: value = "task-2891845" [ 1043.262039] env[68285]: _type = "Task" [ 1043.262039] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.269094] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "refresh_cache-f2696c7f-5676-403a-87e0-fb0884866005" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.269094] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "refresh_cache-f2696c7f-5676-403a-87e0-fb0884866005" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.269094] env[68285]: DEBUG nova.network.neutron [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1043.273688] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891845, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.543900] env[68285]: DEBUG nova.compute.manager [req-2efe91b4-5912-4985-8550-5d936618c289 req-047ed5c4-4892-4395-b0ff-6043782a631d service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Received event network-vif-plugged-34b304ef-32d9-464e-98cc-8226f6f71037 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1043.544178] env[68285]: DEBUG oslo_concurrency.lockutils [req-2efe91b4-5912-4985-8550-5d936618c289 req-047ed5c4-4892-4395-b0ff-6043782a631d service nova] Acquiring lock "f2696c7f-5676-403a-87e0-fb0884866005-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.544411] env[68285]: DEBUG oslo_concurrency.lockutils [req-2efe91b4-5912-4985-8550-5d936618c289 req-047ed5c4-4892-4395-b0ff-6043782a631d service nova] Lock "f2696c7f-5676-403a-87e0-fb0884866005-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.544597] env[68285]: DEBUG oslo_concurrency.lockutils [req-2efe91b4-5912-4985-8550-5d936618c289 req-047ed5c4-4892-4395-b0ff-6043782a631d service nova] Lock "f2696c7f-5676-403a-87e0-fb0884866005-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.544783] env[68285]: DEBUG nova.compute.manager [req-2efe91b4-5912-4985-8550-5d936618c289 req-047ed5c4-4892-4395-b0ff-6043782a631d service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] No waiting events found dispatching network-vif-plugged-34b304ef-32d9-464e-98cc-8226f6f71037 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1043.544969] env[68285]: WARNING nova.compute.manager [req-2efe91b4-5912-4985-8550-5d936618c289 req-047ed5c4-4892-4395-b0ff-6043782a631d service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Received unexpected event network-vif-plugged-34b304ef-32d9-464e-98cc-8226f6f71037 for instance with vm_state building and task_state spawning. [ 1043.545206] env[68285]: DEBUG nova.compute.manager [req-2efe91b4-5912-4985-8550-5d936618c289 req-047ed5c4-4892-4395-b0ff-6043782a631d service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Received event network-changed-34b304ef-32d9-464e-98cc-8226f6f71037 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1043.545341] env[68285]: DEBUG nova.compute.manager [req-2efe91b4-5912-4985-8550-5d936618c289 req-047ed5c4-4892-4395-b0ff-6043782a631d service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Refreshing instance network info cache due to event network-changed-34b304ef-32d9-464e-98cc-8226f6f71037. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1043.545521] env[68285]: DEBUG oslo_concurrency.lockutils [req-2efe91b4-5912-4985-8550-5d936618c289 req-047ed5c4-4892-4395-b0ff-6043782a631d service nova] Acquiring lock "refresh_cache-f2696c7f-5676-403a-87e0-fb0884866005" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.568778] env[68285]: DEBUG nova.network.neutron [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Successfully created port: 025d1a2b-ae65-4a5c-a90f-66fabc72e11c {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1043.630947] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688a6b01-c5c8-43d5-a896-a4a85d396ad4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.638133] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b04925-e9ae-4f1d-879b-e240eac7336f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.667149] env[68285]: DEBUG nova.compute.manager [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1043.670283] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f023c326-283d-4e25-85fd-fcf3ce5c5ab4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.678172] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb11f66c-b1b3-46de-b703-c7d622008f5d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.692242] env[68285]: DEBUG nova.compute.provider_tree [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.776243] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891845, 'name': ReconfigVM_Task, 'duration_secs': 0.348496} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.776243] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11/1a7d1cfc-67a5-4178-9bc2-eb8af5104d11.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1043.776243] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75756cde-4a21-4d31-97b3-039f73cb9b66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.782499] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1043.782499] env[68285]: value = "task-2891846" [ 1043.782499] env[68285]: _type = "Task" [ 1043.782499] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.797233] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891846, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.815669] env[68285]: DEBUG nova.network.neutron [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1043.953012] env[68285]: DEBUG nova.network.neutron [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Updating instance_info_cache with network_info: [{"id": "34b304ef-32d9-464e-98cc-8226f6f71037", "address": "fa:16:3e:4c:43:4d", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34b304ef-32", "ovs_interfaceid": "34b304ef-32d9-464e-98cc-8226f6f71037", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.198703] env[68285]: DEBUG nova.scheduler.client.report [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1044.292521] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891846, 'name': Rename_Task, 'duration_secs': 0.139392} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.292790] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1044.293036] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a8b3833-975f-4d9c-a709-489e44905b57 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.299958] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1044.299958] env[68285]: value = "task-2891847" [ 1044.299958] env[68285]: _type = "Task" [ 1044.299958] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.307046] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891847, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.456726] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "refresh_cache-f2696c7f-5676-403a-87e0-fb0884866005" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.457093] env[68285]: DEBUG nova.compute.manager [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Instance network_info: |[{"id": "34b304ef-32d9-464e-98cc-8226f6f71037", "address": "fa:16:3e:4c:43:4d", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34b304ef-32", "ovs_interfaceid": "34b304ef-32d9-464e-98cc-8226f6f71037", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1044.457404] env[68285]: DEBUG oslo_concurrency.lockutils 
[req-2efe91b4-5912-4985-8550-5d936618c289 req-047ed5c4-4892-4395-b0ff-6043782a631d service nova] Acquired lock "refresh_cache-f2696c7f-5676-403a-87e0-fb0884866005" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.457585] env[68285]: DEBUG nova.network.neutron [req-2efe91b4-5912-4985-8550-5d936618c289 req-047ed5c4-4892-4395-b0ff-6043782a631d service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Refreshing network info cache for port 34b304ef-32d9-464e-98cc-8226f6f71037 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1044.458875] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:43:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34b304ef-32d9-464e-98cc-8226f6f71037', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1044.468062] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1044.471311] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1044.472033] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b83fa364-c394-4ce2-951d-3a8d98880654 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.492703] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1044.492703] env[68285]: value = "task-2891848" [ 1044.492703] env[68285]: _type = "Task" [ 1044.492703] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.500086] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891848, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.680986] env[68285]: DEBUG nova.compute.manager [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1044.704054] env[68285]: DEBUG oslo_concurrency.lockutils [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.058s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.707357] env[68285]: DEBUG nova.network.neutron [req-2efe91b4-5912-4985-8550-5d936618c289 req-047ed5c4-4892-4395-b0ff-6043782a631d service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Updated VIF entry in instance network info cache for port 34b304ef-32d9-464e-98cc-8226f6f71037. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1044.707357] env[68285]: DEBUG nova.network.neutron [req-2efe91b4-5912-4985-8550-5d936618c289 req-047ed5c4-4892-4395-b0ff-6043782a631d service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Updating instance_info_cache with network_info: [{"id": "34b304ef-32d9-464e-98cc-8226f6f71037", "address": "fa:16:3e:4c:43:4d", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34b304ef-32", "ovs_interfaceid": "34b304ef-32d9-464e-98cc-8226f6f71037", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.709018] env[68285]: DEBUG oslo_concurrency.lockutils [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.620s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.709958] env[68285]: DEBUG nova.objects.instance [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Lazy-loading 'resources' on Instance uuid 7bef3e2a-00ab-480a-aa8c-335635ee5d31 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1044.713286] env[68285]: DEBUG nova.virt.hardware [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1044.716665] env[68285]: DEBUG nova.virt.hardware [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1044.716665] env[68285]: DEBUG nova.virt.hardware [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1044.716665] env[68285]: DEBUG nova.virt.hardware [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1044.716665] env[68285]: DEBUG nova.virt.hardware [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1044.716665] env[68285]: DEBUG nova.virt.hardware [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1044.716665] env[68285]: DEBUG nova.virt.hardware [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1044.716665] env[68285]: DEBUG nova.virt.hardware [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1044.716665] env[68285]: DEBUG nova.virt.hardware [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1044.716665] env[68285]: 
DEBUG nova.virt.hardware [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1044.716665] env[68285]: DEBUG nova.virt.hardware [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1044.716665] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f832db58-61c9-425b-acc5-387ce6f76a3e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.726712] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db97f3f0-6e80-4615-a51c-80578079c804 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.747197] env[68285]: INFO nova.scheduler.client.report [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Deleted allocations for instance 32d23c62-23ec-4732-a95d-6ac32805e1b9 [ 1044.809902] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891847, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.002886] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891848, 'name': CreateVM_Task, 'duration_secs': 0.341822} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.002886] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1045.003173] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.003341] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.003661] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1045.003911] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05a07d90-c412-4337-8084-a4411e6cd823 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.008141] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1045.008141] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c50622-506e-5e7d-ab90-3086add552e5" [ 1045.008141] env[68285]: _type = "Task" [ 1045.008141] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.015908] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c50622-506e-5e7d-ab90-3086add552e5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.211810] env[68285]: DEBUG oslo_concurrency.lockutils [req-2efe91b4-5912-4985-8550-5d936618c289 req-047ed5c4-4892-4395-b0ff-6043782a631d service nova] Releasing lock "refresh_cache-f2696c7f-5676-403a-87e0-fb0884866005" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.258330] env[68285]: DEBUG oslo_concurrency.lockutils [None req-adad3cf5-ad69-4220-a5f5-1e1c45756ff6 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "32d23c62-23ec-4732-a95d-6ac32805e1b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.679s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.315682] env[68285]: DEBUG oslo_vmware.api [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2891847, 'name': PowerOnVM_Task, 'duration_secs': 0.51868} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.315949] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1045.316161] env[68285]: INFO nova.compute.manager [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Took 8.76 seconds to spawn the instance on the hypervisor. [ 1045.316344] env[68285]: DEBUG nova.compute.manager [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1045.317145] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4acb7534-7ff1-4495-81cf-b83863c6ae79 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.519642] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c50622-506e-5e7d-ab90-3086add552e5, 'name': SearchDatastore_Task, 'duration_secs': 0.010494} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.520098] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.520457] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1045.520817] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.521102] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.521412] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1045.521775] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-816995e7-4823-4a26-9925-4ce61be92a3d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.529948] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1045.530289] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1045.531100] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26fc60c5-44c9-41e2-95f2-186407054bcd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.538280] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1045.538280] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d2cf54-e381-7975-7894-d35d8eea1ccd" [ 1045.538280] env[68285]: _type = "Task" [ 1045.538280] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.545146] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d2cf54-e381-7975-7894-d35d8eea1ccd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.591155] env[68285]: DEBUG nova.network.neutron [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Successfully updated port: 025d1a2b-ae65-4a5c-a90f-66fabc72e11c {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1045.626010] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b08d6d9-facc-4f5e-8528-c2b67b7c2729 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.632612] env[68285]: DEBUG nova.compute.manager [req-c29d3b89-4de3-43ac-9c4d-78b863b36b23 req-c54d6818-0f86-4886-9d45-f7fd98c82838 service nova] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Received event network-vif-plugged-025d1a2b-ae65-4a5c-a90f-66fabc72e11c {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1045.632612] env[68285]: DEBUG oslo_concurrency.lockutils [req-c29d3b89-4de3-43ac-9c4d-78b863b36b23 req-c54d6818-0f86-4886-9d45-f7fd98c82838 service nova] Acquiring lock "1f5fe064-0443-4b7f-911a-45d803836eeb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.632612] env[68285]: DEBUG oslo_concurrency.lockutils [req-c29d3b89-4de3-43ac-9c4d-78b863b36b23 req-c54d6818-0f86-4886-9d45-f7fd98c82838 service nova] Lock "1f5fe064-0443-4b7f-911a-45d803836eeb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.633098] env[68285]: DEBUG oslo_concurrency.lockutils [req-c29d3b89-4de3-43ac-9c4d-78b863b36b23 req-c54d6818-0f86-4886-9d45-f7fd98c82838 service nova] Lock "1f5fe064-0443-4b7f-911a-45d803836eeb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.633098] env[68285]: DEBUG nova.compute.manager [req-c29d3b89-4de3-43ac-9c4d-78b863b36b23 req-c54d6818-0f86-4886-9d45-f7fd98c82838 service nova] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] No waiting events found dispatching network-vif-plugged-025d1a2b-ae65-4a5c-a90f-66fabc72e11c {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1045.633098] env[68285]: WARNING nova.compute.manager [req-c29d3b89-4de3-43ac-9c4d-78b863b36b23 req-c54d6818-0f86-4886-9d45-f7fd98c82838 service nova] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Received unexpected event network-vif-plugged-025d1a2b-ae65-4a5c-a90f-66fabc72e11c for instance with vm_state building and task_state spawning. [ 1045.636137] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f5e27f-7b91-4bd9-9dc6-fee70cdcf4e9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.669686] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7fb168-468a-46c7-9d5a-998fced359cb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.676958] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f505b699-d813-46d9-89af-da9cf4f1d355 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.690381] env[68285]: DEBUG nova.compute.provider_tree [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1045.847225] env[68285]: INFO nova.compute.manager [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Took 64.87 seconds to build instance. [ 1046.046394] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d2cf54-e381-7975-7894-d35d8eea1ccd, 'name': SearchDatastore_Task, 'duration_secs': 0.008586} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.047189] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58bade91-209f-454d-9c1c-612636c8f55c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.052212] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1046.052212] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5221e6a8-7fd8-8f96-a5bc-9ed4807150a4" [ 1046.052212] env[68285]: _type = "Task" [ 1046.052212] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.059650] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5221e6a8-7fd8-8f96-a5bc-9ed4807150a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.093349] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.093484] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.093632] env[68285]: DEBUG nova.network.neutron [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1046.193359] env[68285]: DEBUG nova.scheduler.client.report [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1046.349788] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a411f79-5a85-4974-9b2c-1b6ca6c6ad04 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.379s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.563113] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5221e6a8-7fd8-8f96-a5bc-9ed4807150a4, 'name': SearchDatastore_Task, 'duration_secs': 0.009228} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.563433] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.563709] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] f2696c7f-5676-403a-87e0-fb0884866005/f2696c7f-5676-403a-87e0-fb0884866005.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1046.563970] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2bd6b18-4b33-46c7-891f-f7dbb89e1cc2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.570327] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1046.570327] env[68285]: value = "task-2891849" [ 1046.570327] env[68285]: _type = "Task" [ 1046.570327] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.577792] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891849, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.646068] env[68285]: DEBUG nova.network.neutron [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1046.698490] env[68285]: DEBUG oslo_concurrency.lockutils [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.989s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.700841] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.265s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.701095] env[68285]: DEBUG nova.objects.instance [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lazy-loading 'resources' on Instance uuid 9569d50c-d358-4cc5-a106-32da785e4765 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1046.731782] env[68285]: INFO nova.scheduler.client.report [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Deleted allocations for instance 7bef3e2a-00ab-480a-aa8c-335635ee5d31 [ 1046.875275] env[68285]: DEBUG nova.network.neutron [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating instance_info_cache with network_info: [{"id": "025d1a2b-ae65-4a5c-a90f-66fabc72e11c", "address": "fa:16:3e:e1:f7:d0", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025d1a2b-ae", "ovs_interfaceid": "025d1a2b-ae65-4a5c-a90f-66fabc72e11c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.085821] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891849, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.445172} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.085821] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] f2696c7f-5676-403a-87e0-fb0884866005/f2696c7f-5676-403a-87e0-fb0884866005.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1047.085821] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1047.085821] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-43b74d2d-fa04-4099-bbf3-9f2b233b768e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.090100] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1047.090100] env[68285]: value = "task-2891850" [ 1047.090100] env[68285]: _type = "Task" [ 1047.090100] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.100042] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891850, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.246097] env[68285]: DEBUG oslo_concurrency.lockutils [None req-339cdf24-1d0a-4fb7-b96d-dd792e5b369e tempest-ServerRescueTestJSONUnderV235-12826845 tempest-ServerRescueTestJSONUnderV235-12826845-project-member] Lock "7bef3e2a-00ab-480a-aa8c-335635ee5d31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.369s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.381292] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.381602] env[68285]: DEBUG nova.compute.manager [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Instance network_info: |[{"id": "025d1a2b-ae65-4a5c-a90f-66fabc72e11c", "address": "fa:16:3e:e1:f7:d0", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025d1a2b-ae", "ovs_interfaceid": "025d1a2b-ae65-4a5c-a90f-66fabc72e11c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1047.382044] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:f7:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '82dbbfe2-640b-433f-a8e9-1566bd40fb34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '025d1a2b-ae65-4a5c-a90f-66fabc72e11c', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1047.389797] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1047.392713] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1047.393196] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e4612360-97a6-4af4-a13c-a5a66f8fdcec {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.418091] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1047.418091] env[68285]: value = "task-2891851" [ 1047.418091] env[68285]: _type = "Task" [ 1047.418091] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.429079] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891851, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.604092] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891850, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06187} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.604495] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1047.605411] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed59b11-a8fd-4e29-a8c1-9f2ee171c2c1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.633999] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] f2696c7f-5676-403a-87e0-fb0884866005/f2696c7f-5676-403a-87e0-fb0884866005.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1047.637264] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18269518-b104-4393-b079-5409c932ff45 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.661826] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1047.661826] env[68285]: value = "task-2891852" [ 1047.661826] env[68285]: _type = "Task" [ 1047.661826] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.670628] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891852, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.673382] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd4a109-a788-4371-aafc-e372b98b6c2f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.677458] env[68285]: DEBUG nova.compute.manager [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Received event network-changed-025d1a2b-ae65-4a5c-a90f-66fabc72e11c {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1047.677661] env[68285]: DEBUG nova.compute.manager [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Refreshing instance network info cache due to event network-changed-025d1a2b-ae65-4a5c-a90f-66fabc72e11c. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1047.677879] env[68285]: DEBUG oslo_concurrency.lockutils [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] Acquiring lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.678034] env[68285]: DEBUG oslo_concurrency.lockutils [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] Acquired lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.678201] env[68285]: DEBUG nova.network.neutron [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Refreshing network info cache for port 025d1a2b-ae65-4a5c-a90f-66fabc72e11c {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1047.690471] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467337ff-019f-48c0-9b0e-cd631a05aed6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.722142] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c9299d-7b85-470f-afd1-c107fff87d55 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.729786] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adaeeed5-ad88-472b-abab-1c79910a1d3d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.743704] env[68285]: DEBUG nova.compute.provider_tree [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Inventory has not changed 
in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.927649] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891851, 'name': CreateVM_Task, 'duration_secs': 0.355593} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.927837] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1047.928568] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.928746] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.929079] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1047.929347] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79b83904-1794-41b7-b710-4e535e078c9a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.933857] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1047.933857] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52651df9-0d41-6834-7ba5-a3db2ae24935" [ 1047.933857] env[68285]: _type = "Task" [ 1047.933857] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.941860] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52651df9-0d41-6834-7ba5-a3db2ae24935, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.171948] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891852, 'name': ReconfigVM_Task, 'duration_secs': 0.286499} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.175644] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Reconfigured VM instance instance-00000043 to attach disk [datastore1] f2696c7f-5676-403a-87e0-fb0884866005/f2696c7f-5676-403a-87e0-fb0884866005.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.175644] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-640523b1-4e98-420c-a8fe-f734f5087dbd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.180205] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1048.180205] env[68285]: value = "task-2891853" [ 1048.180205] env[68285]: _type = "Task" [ 1048.180205] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.197026] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891853, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.247556] env[68285]: DEBUG nova.scheduler.client.report [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1048.448023] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52651df9-0d41-6834-7ba5-a3db2ae24935, 'name': SearchDatastore_Task, 'duration_secs': 0.013221} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.448023] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.448023] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1048.448023] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.448023] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.448023] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1048.448023] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47001190-8eec-47d0-9c9c-f03754394bd7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.455992] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1048.455992] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1048.456245] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db41bd3b-4e27-4429-822b-508bc537b373 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.462039] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1048.462039] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52062030-5915-323d-466b-9178b3e59b2b" [ 1048.462039] env[68285]: _type = "Task" [ 1048.462039] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.469796] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52062030-5915-323d-466b-9178b3e59b2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.525323] env[68285]: DEBUG nova.network.neutron [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updated VIF entry in instance network info cache for port 025d1a2b-ae65-4a5c-a90f-66fabc72e11c. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1048.525323] env[68285]: DEBUG nova.network.neutron [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating instance_info_cache with network_info: [{"id": "025d1a2b-ae65-4a5c-a90f-66fabc72e11c", "address": "fa:16:3e:e1:f7:d0", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025d1a2b-ae", "ovs_interfaceid": "025d1a2b-ae65-4a5c-a90f-66fabc72e11c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.692586] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891853, 'name': Rename_Task, 'duration_secs': 0.143232} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.692706] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1048.692904] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60870355-6c97-41e9-8e9a-f03f3a896603 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.699140] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1048.699140] env[68285]: value = "task-2891854" [ 1048.699140] env[68285]: _type = "Task" [ 1048.699140] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.706804] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891854, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.753484] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.053s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.755904] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.009s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.756159] env[68285]: DEBUG nova.objects.instance [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Lazy-loading 'resources' on Instance uuid 8a848ec8-1ae0-4437-be4f-49219214d11f {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1048.780105] env[68285]: INFO nova.scheduler.client.report [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Deleted allocations for instance 9569d50c-d358-4cc5-a106-32da785e4765 [ 1048.973279] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52062030-5915-323d-466b-9178b3e59b2b, 'name': SearchDatastore_Task, 'duration_secs': 0.008627} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.974247] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd8d7392-c7b4-45c9-bfb1-82d4d4b44054 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.979655] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1048.979655] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d07128-5b64-b802-7120-7e2a6314488c" [ 1048.979655] env[68285]: _type = "Task" [ 1048.979655] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.987794] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d07128-5b64-b802-7120-7e2a6314488c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.026607] env[68285]: DEBUG oslo_concurrency.lockutils [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] Releasing lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.026889] env[68285]: DEBUG nova.compute.manager [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Received event network-changed-70ff5ae8-d9d3-4840-abcd-733345a4c4af {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1049.027085] env[68285]: DEBUG nova.compute.manager [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Refreshing instance network info cache due to event network-changed-70ff5ae8-d9d3-4840-abcd-733345a4c4af. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1049.027316] env[68285]: DEBUG oslo_concurrency.lockutils [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] Acquiring lock "refresh_cache-1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.027481] env[68285]: DEBUG oslo_concurrency.lockutils [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] Acquired lock "refresh_cache-1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.027647] env[68285]: DEBUG nova.network.neutron [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Refreshing network info cache for port 70ff5ae8-d9d3-4840-abcd-733345a4c4af {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1049.210429] env[68285]: DEBUG oslo_vmware.api [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891854, 'name': PowerOnVM_Task, 'duration_secs': 0.47948} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.210934] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1049.210934] env[68285]: INFO nova.compute.manager [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Took 7.02 seconds to spawn the instance on the hypervisor. 
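The PowerOnVM_Task entries above follow the usual oslo.vmware pattern: a single SOAP call returns a Task managed-object reference (here "task-2891854"), and the session then polls that task — the "Waiting for the task … to complete", "progress is 0%", and "completed successfully" lines — until it reaches a terminal state. The following is only a minimal illustrative sketch of that pattern, not Nova's actual power-on code; `session`, `vm_ref`, and `power_on_and_wait` are hypothetical names, and `session` is assumed to be an already-established oslo_vmware.api.VMwareAPISession like the one created at service start-up.

    # Hypothetical sketch of the invoke/wait pattern traced in the log above.
    def power_on_and_wait(session, vm_ref):
        # Invoke the vSphere API. This returns immediately with a Task
        # reference (e.g. "task-2891854"), not with the final result.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)

        # wait_for_task() polls the task periodically, logging its progress,
        # and returns the TaskInfo once it succeeds (or raises on error).
        task_info = session.wait_for_task(task)
        return task_info

The same invoke-then-poll shape accounts for the SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, and CloneVM_Task sequences that follow in this trace.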
[ 1049.211102] env[68285]: DEBUG nova.compute.manager [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1049.211944] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7abcc29-1835-4fdc-b3a0-5f53839720a1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.293065] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ec65117b-8b89-4588-ba71-575889d95c37 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "9569d50c-d358-4cc5-a106-32da785e4765" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.390s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.489733] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d07128-5b64-b802-7120-7e2a6314488c, 'name': SearchDatastore_Task, 'duration_secs': 0.010122} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.490185] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.490704] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 1f5fe064-0443-4b7f-911a-45d803836eeb/1f5fe064-0443-4b7f-911a-45d803836eeb.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1049.493211] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-88520839-d456-4402-8a1d-2b4c3a6c3ae7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.501568] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1049.501568] env[68285]: value = "task-2891855" [ 1049.501568] env[68285]: _type = "Task" [ 1049.501568] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.515938] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891855, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.647668] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a880fe85-8cad-41ae-9d15-24303c7106e8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.655861] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b368232-6529-4f99-a368-0692778bee5a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.689393] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51974937-85f8-4dd3-a737-315b73411fc7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.697904] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93190f17-fac3-4de5-9fbf-976baff7bab6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.714404] env[68285]: DEBUG nova.compute.provider_tree [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.734300] env[68285]: INFO nova.compute.manager [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Took 53.11 seconds to build instance. [ 1049.962622] env[68285]: DEBUG nova.network.neutron [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Updated VIF entry in instance network info cache for port 70ff5ae8-d9d3-4840-abcd-733345a4c4af. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1049.963025] env[68285]: DEBUG nova.network.neutron [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Updating instance_info_cache with network_info: [{"id": "70ff5ae8-d9d3-4840-abcd-733345a4c4af", "address": "fa:16:3e:8a:e6:d8", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70ff5ae8-d9", "ovs_interfaceid": "70ff5ae8-d9d3-4840-abcd-733345a4c4af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.010978] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891855, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.441032} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.011181] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 1f5fe064-0443-4b7f-911a-45d803836eeb/1f5fe064-0443-4b7f-911a-45d803836eeb.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1050.011400] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1050.011676] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a6ae84e7-a320-4c85-a58c-908920b03481 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.017745] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1050.017745] env[68285]: value = "task-2891856" [ 1050.017745] env[68285]: _type = "Task" [ 1050.017745] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.026311] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891856, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.217969] env[68285]: DEBUG nova.scheduler.client.report [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1050.236653] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fda92a9a-2b3e-4cf9-8aaf-40c5b030ba64 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "f2696c7f-5676-403a-87e0-fb0884866005" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.636s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.466137] env[68285]: DEBUG oslo_concurrency.lockutils [req-5aaa2ae3-fc60-4e53-84a0-53a145506715 req-c8acef4c-2ad8-4a4b-b3d9-91b840ef7b73 service nova] Releasing lock "refresh_cache-1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1050.528941] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891856, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.142671} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.528941] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1050.529647] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29780a4-d5ac-42a9-9769-cb2db02635fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.551636] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 1f5fe064-0443-4b7f-911a-45d803836eeb/1f5fe064-0443-4b7f-911a-45d803836eeb.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1050.551929] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14c4cbba-3a8f-46a6-ae16-bbd6a329e958 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.572872] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1050.572872] env[68285]: value = "task-2891857" [ 1050.572872] env[68285]: _type = "Task" [ 1050.572872] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.581025] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891857, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.699518] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "cbf2a387-8a5a-4400-833b-e04e23ca42f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.699811] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "cbf2a387-8a5a-4400-833b-e04e23ca42f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.724742] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.968s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.726991] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 51.254s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.727400] env[68285]: DEBUG nova.objects.instance [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lazy-loading 'resources' on Instance uuid f1b8808d-c3a1-4be6-b6ec-ed441291e8f2 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.740603] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "d025b807-fda4-4aff-beac-0ad6a092fe74" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.740859] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "d025b807-fda4-4aff-beac-0ad6a092fe74" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.749697] env[68285]: INFO nova.scheduler.client.report [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Deleted allocations for instance 8a848ec8-1ae0-4437-be4f-49219214d11f [ 1051.002199] env[68285]: DEBUG oslo_concurrency.lockutils [None 
req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "f2696c7f-5676-403a-87e0-fb0884866005" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.002447] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "f2696c7f-5676-403a-87e0-fb0884866005" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.002647] env[68285]: INFO nova.compute.manager [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Shelving [ 1051.084115] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891857, 'name': ReconfigVM_Task, 'duration_secs': 0.291207} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.084449] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 1f5fe064-0443-4b7f-911a-45d803836eeb/1f5fe064-0443-4b7f-911a-45d803836eeb.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1051.085069] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f3a85d74-6e24-42cf-8581-f137db20c104 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.090845] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1051.090845] env[68285]: value = "task-2891858" [ 1051.090845] env[68285]: _type = "Task" [ 1051.090845] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.098309] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891858, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.202625] env[68285]: DEBUG nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1051.244305] env[68285]: DEBUG nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1051.257048] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9aa906c9-ab57-4e96-9372-98c106d3239a tempest-ServersAdmin275Test-205570985 tempest-ServersAdmin275Test-205570985-project-member] Lock "8a848ec8-1ae0-4437-be4f-49219214d11f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.817s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.549031] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6969e79-040b-4f85-8fd2-05b61d4fd2c2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.556057] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b37345-22a3-4c9c-b8b4-4f1457f18665 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.587708] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf10de31-a947-4c45-8988-9645c02dae23 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.597564] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4970b3a5-5479-449a-a62d-1e54501b41d2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.604334] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891858, 'name': Rename_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.613563] env[68285]: DEBUG nova.compute.provider_tree [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.722496] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.765511] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.011743] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1052.012317] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75ca2931-9a56-43ec-92fe-fdecd7fe0cdf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.019285] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1052.019285] env[68285]: value = "task-2891859" [ 1052.019285] env[68285]: _type = "Task" [ 1052.019285] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.027271] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891859, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.102420] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891858, 'name': Rename_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.116547] env[68285]: DEBUG nova.scheduler.client.report [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1052.529662] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891859, 'name': PowerOffVM_Task, 'duration_secs': 0.18479} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.529962] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.530769] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b09f54-b5a4-4cf7-8d27-747e94cc42ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.548466] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0538fc60-6816-4c3b-acaf-7d5d4f63b7e2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.603233] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891858, 'name': Rename_Task, 'duration_secs': 1.155274} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.603541] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1052.603785] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe84254e-66c4-42fb-ab53-61703f98af0e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.610845] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1052.610845] env[68285]: value = "task-2891860" [ 1052.610845] env[68285]: _type = "Task" [ 1052.610845] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.619934] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891860, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.621829] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.895s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.624202] env[68285]: DEBUG oslo_concurrency.lockutils [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 51.419s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.624624] env[68285]: DEBUG nova.objects.instance [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lazy-loading 'resources' on Instance uuid 1b9dd0e2-781f-43d7-a66e-e718a0972c78 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.646236] env[68285]: INFO nova.scheduler.client.report [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleted allocations for instance f1b8808d-c3a1-4be6-b6ec-ed441291e8f2 [ 1053.062061] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1053.062491] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with 
opID=oslo.vmware-28d0c615-7fbd-4fd7-87d6-36e62aba42fd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.070968] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1053.070968] env[68285]: value = "task-2891861" [ 1053.070968] env[68285]: _type = "Task" [ 1053.070968] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.081797] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891861, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.122936] env[68285]: DEBUG oslo_vmware.api [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891860, 'name': PowerOnVM_Task, 'duration_secs': 0.476729} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.123330] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1053.123616] env[68285]: INFO nova.compute.manager [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Took 8.44 seconds to spawn the instance on the hypervisor. [ 1053.123876] env[68285]: DEBUG nova.compute.manager [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1053.124961] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e8bff7-87a0-43e9-b720-8ed2fe63110a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.157617] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d1cd7aea-3b30-4781-b4ed-f32a8e4c4b10 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "f1b8808d-c3a1-4be6-b6ec-ed441291e8f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 57.223s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.583891] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891861, 'name': CreateSnapshot_Task, 'duration_secs': 0.392121} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.584220] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1053.584965] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61822fd6-8acb-4a4b-8809-723c82ab56fa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.648558] env[68285]: INFO nova.compute.manager [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Took 56.04 seconds to build instance. [ 1053.724597] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14239fa-e718-4c0b-a000-da6f9bc89cbe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.732695] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5a30e1-683c-4395-ad9a-15346255b55b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.763815] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e830bf-2317-4260-a417-b06dd327ece6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.771052] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecb6266-b838-4f6f-abdd-0619cb23a41f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.786575] env[68285]: DEBUG nova.compute.provider_tree [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.106042] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1054.106372] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ae1e44d2-50fa-43e0-bf1d-70ada8b27ba1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.114574] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1054.114574] env[68285]: value = "task-2891862" [ 1054.114574] env[68285]: _type = "Task" [ 1054.114574] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.122666] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891862, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.150427] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e376601-121a-40fd-89fa-97488f07d80d tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "1f5fe064-0443-4b7f-911a-45d803836eeb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.563s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.290149] env[68285]: DEBUG nova.scheduler.client.report [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1054.626469] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891862, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.795549] env[68285]: DEBUG oslo_concurrency.lockutils [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.171s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.797953] env[68285]: DEBUG oslo_concurrency.lockutils [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 53.381s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.798198] env[68285]: DEBUG nova.objects.instance [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lazy-loading 'resources' on Instance uuid 52fbfbe4-1807-4d6d-9139-ebe30e6bf647 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.819355] env[68285]: INFO nova.scheduler.client.report [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleted allocations for instance 1b9dd0e2-781f-43d7-a66e-e718a0972c78 [ 1055.125169] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891862, 'name': CloneVM_Task} progress is 95%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.333761] env[68285]: DEBUG oslo_concurrency.lockutils [None req-596bde4d-f11a-4e60-9396-39929fe85402 tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "1b9dd0e2-781f-43d7-a66e-e718a0972c78" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 57.556s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.623679] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb3b2c2-eaca-4d50-a411-024422e1d958 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.630034] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891862, 'name': CloneVM_Task, 'duration_secs': 1.27201} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.630034] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Created linked-clone VM from snapshot [ 1055.630721] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebf04c9-24d9-4646-ba16-5a65f1dcb836 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.636046] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acc47b5-0203-4272-bb3f-caeaa76561e9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.642139] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "60144efd-061e-4144-9541-b2321c9b0ec1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.642373] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "60144efd-061e-4144-9541-b2321c9b0ec1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.642582] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "60144efd-061e-4144-9541-b2321c9b0ec1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.642782] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "60144efd-061e-4144-9541-b2321c9b0ec1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.642964] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "60144efd-061e-4144-9541-b2321c9b0ec1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.644602] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Uploading image 6fd33489-77cb-4438-9011-563e08865919 {{(pid=68285) 
upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1055.646918] env[68285]: INFO nova.compute.manager [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Terminating instance [ 1055.679173] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bcf052-6454-4308-9ad0-691a0737b877 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.687197] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19913184-7ed1-43c0-a104-fc4396cca0ef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.692918] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1055.692918] env[68285]: value = "vm-580969" [ 1055.692918] env[68285]: _type = "VirtualMachine" [ 1055.692918] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1055.693431] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f900a41a-d3df-4c24-8bd7-81bfe74f8f9b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.705130] env[68285]: DEBUG nova.compute.provider_tree [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1055.707143] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lease: (returnval){ [ 1055.707143] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52dd56d2-9747-9a6e-fb01-0e9bd2a88d3f" [ 1055.707143] env[68285]: _type = "HttpNfcLease" [ 1055.707143] env[68285]: } obtained for exporting VM: (result){ [ 1055.707143] env[68285]: value = "vm-580969" [ 1055.707143] env[68285]: _type = "VirtualMachine" [ 1055.707143] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1055.707553] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the lease: (returnval){ [ 1055.707553] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52dd56d2-9747-9a6e-fb01-0e9bd2a88d3f" [ 1055.707553] env[68285]: _type = "HttpNfcLease" [ 1055.707553] env[68285]: } to be ready. 
{{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1055.714486] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1055.714486] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52dd56d2-9747-9a6e-fb01-0e9bd2a88d3f" [ 1055.714486] env[68285]: _type = "HttpNfcLease" [ 1055.714486] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1055.714716] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1055.714716] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52dd56d2-9747-9a6e-fb01-0e9bd2a88d3f" [ 1055.714716] env[68285]: _type = "HttpNfcLease" [ 1055.714716] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1055.715424] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cf7bda-62bf-436e-ad8b-bfa4b5489d6c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.723381] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5248aa3a-2745-7a7f-58e7-db260153fab8/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1055.723469] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5248aa3a-2745-7a7f-58e7-db260153fab8/disk-0.vmdk for reading. {{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1055.786280] env[68285]: DEBUG nova.compute.manager [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Stashing vm_state: active {{(pid=68285) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1055.817045] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f03354b1-740b-48dc-8f00-b1a7e5be1a86 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.150811] env[68285]: DEBUG nova.compute.manager [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1056.151174] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1056.151934] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf59103-119d-4241-a4ff-447f79b69d81 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.160307] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.160645] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-878b3b63-e3de-4ae9-aab6-62a9a75f2102 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.167187] env[68285]: DEBUG oslo_vmware.api [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 1056.167187] env[68285]: value = "task-2891864" [ 1056.167187] env[68285]: _type = "Task" [ 1056.167187] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.175510] env[68285]: DEBUG oslo_vmware.api [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891864, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.208954] env[68285]: DEBUG nova.scheduler.client.report [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1056.307173] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.677905] env[68285]: DEBUG oslo_vmware.api [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891864, 'name': PowerOffVM_Task, 'duration_secs': 0.234024} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.678198] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.678369] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1056.678626] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf0c22cf-f1e5-4591-8bdb-5e2ec4f5c3a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.715526] env[68285]: DEBUG oslo_concurrency.lockutils [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.917s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.718030] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 53.707s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.734701] env[68285]: INFO 
nova.scheduler.client.report [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Deleted allocations for instance 52fbfbe4-1807-4d6d-9139-ebe30e6bf647 [ 1057.224596] env[68285]: INFO nova.compute.claims [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1057.243083] env[68285]: DEBUG oslo_concurrency.lockutils [None req-772040df-fe68-4854-9589-17b3f6871f8a tempest-VolumesAdminNegativeTest-1701843704 tempest-VolumesAdminNegativeTest-1701843704-project-member] Lock "52fbfbe4-1807-4d6d-9139-ebe30e6bf647" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 59.018s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.249623] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.249867] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.250075] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleting the datastore file [datastore1] 60144efd-061e-4144-9541-b2321c9b0ec1 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.250343] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8587f4b2-b27b-45cd-ac3d-06db870c4de8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.258386] env[68285]: DEBUG oslo_vmware.api [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 1057.258386] env[68285]: value = "task-2891866" [ 1057.258386] env[68285]: _type = "Task" [ 1057.258386] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.267385] env[68285]: DEBUG oslo_vmware.api [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891866, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.732242] env[68285]: INFO nova.compute.resource_tracker [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating resource usage from migration e2bf8756-19fa-4126-b4c6-11b9b79ae81d [ 1057.771505] env[68285]: DEBUG oslo_vmware.api [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891866, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.244606} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.771769] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.771954] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.772143] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.772317] env[68285]: INFO nova.compute.manager [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Took 1.62 seconds to destroy the instance on the hypervisor. [ 1057.773277] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1057.773277] env[68285]: DEBUG nova.compute.manager [-] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1057.773277] env[68285]: DEBUG nova.network.neutron [-] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1058.121559] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ea9d40-5d9b-45cf-b89e-5a45bb8dfe5b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.129740] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ef6579-63d3-4f6d-8411-b5f63189bbbf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.167166] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec3d12d-d099-43ec-bf59-68d4059f249f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.177236] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969fde43-1c24-4740-8c7e-818a87ffab44 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.192711] env[68285]: DEBUG nova.compute.provider_tree [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1058.449768] env[68285]: DEBUG nova.compute.manager [req-89f5f3fc-5438-4509-90f7-429aaa3a3a0f req-a09c9219-0d2d-4296-b5bb-d8ee300467b2 service nova] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Received event network-vif-deleted-23fd8792-3c59-451c-9424-1043ad4846a0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1058.450094] env[68285]: INFO nova.compute.manager [req-89f5f3fc-5438-4509-90f7-429aaa3a3a0f req-a09c9219-0d2d-4296-b5bb-d8ee300467b2 service nova] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Neutron deleted interface 23fd8792-3c59-451c-9424-1043ad4846a0; detaching it from the instance and deleting it from the info cache [ 1058.450209] env[68285]: DEBUG nova.network.neutron [req-89f5f3fc-5438-4509-90f7-429aaa3a3a0f req-a09c9219-0d2d-4296-b5bb-d8ee300467b2 service nova] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.635816] env[68285]: DEBUG nova.network.neutron [-] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Updating 
instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.717469] env[68285]: ERROR nova.scheduler.client.report [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [req-eb7fc622-29da-48d8-aacf-ab7763f49c2c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-eb7fc622-29da-48d8-aacf-ab7763f49c2c"}]} [ 1058.736183] env[68285]: DEBUG nova.scheduler.client.report [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1058.751680] env[68285]: DEBUG nova.scheduler.client.report [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1058.751998] env[68285]: DEBUG nova.compute.provider_tree [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1058.763463] env[68285]: DEBUG nova.scheduler.client.report [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1058.783376] env[68285]: DEBUG nova.scheduler.client.report [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 
tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1058.957026] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-47a37ddc-aeec-4874-831b-3fa0c7e7f02b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.965384] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ddb2b7-2208-4351-9ec8-6c474929dd2e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.999306] env[68285]: DEBUG nova.compute.manager [req-89f5f3fc-5438-4509-90f7-429aaa3a3a0f req-a09c9219-0d2d-4296-b5bb-d8ee300467b2 service nova] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Detach interface failed, port_id=23fd8792-3c59-451c-9424-1043ad4846a0, reason: Instance 60144efd-061e-4144-9541-b2321c9b0ec1 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1059.138982] env[68285]: INFO nova.compute.manager [-] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Took 1.37 seconds to deallocate network for instance. [ 1059.231318] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886435d5-bfb1-4448-aeb0-513b14cbcf61 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.241952] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1d2939-c680-459e-b437-19786878b19a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.277096] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e459cc6-13b9-44ce-a735-1f8ea885d762 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.286209] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee50d457-ec97-4a97-ba3a-ad9331660ba7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.300699] env[68285]: DEBUG nova.compute.provider_tree [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1059.651129] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 
tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.834053] env[68285]: DEBUG nova.scheduler.client.report [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 101 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1059.834358] env[68285]: DEBUG nova.compute.provider_tree [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 101 to 102 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1059.834541] env[68285]: DEBUG nova.compute.provider_tree [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1060.344023] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.624s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.344023] env[68285]: INFO nova.compute.manager [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Migrating [ 1060.350641] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 53.471s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.870770] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.871084] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.871084] env[68285]: DEBUG nova.network.neutron [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1061.373888] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Applying migration context for instance 34aeba05-804e-444c-8e58-69c7721b10b1 as it has an incoming, in-progress migration e2bf8756-19fa-4126-b4c6-11b9b79ae81d. Migration status is pre-migrating {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1061.375413] env[68285]: INFO nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating resource usage from migration e2bf8756-19fa-4126-b4c6-11b9b79ae81d [ 1061.375780] env[68285]: INFO nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating resource usage from migration cfcece04-fb70-4f24-94cf-cf4bae5a06fb [ 1061.402047] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance ec89a2a4-3bfc-45c5-b7f2-239b52995d6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.402226] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 60144efd-061e-4144-9541-b2321c9b0ec1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1061.402356] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d0f6ab86-e18d-42ac-bcf3-94eafb1939ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.402484] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance a97df3d2-c182-46d8-95c2-61caccade285 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.402961] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 5e101d74-7a82-4118-8f4c-7af9a6b0917a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1061.402961] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d4f20336-9c29-4aac-8c0d-f577749cd7d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.402961] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance f13ad5e7-341f-4475-b334-2144b0923e3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.403116] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance b3b7f551-81aa-4ac4-9906-020fac5f01f7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1061.403250] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 64103f25-6411-44be-a60f-b9c276dba331 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1061.403370] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 3c71f649-b456-45a0-a113-725a529702a2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1061.403487] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 9e81990d-e63e-48a7-8941-f0298ca184b3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1061.403607] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 1dce61a2-0fe2-4384-835c-7e324446d7cc is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1061.403725] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 2a9b3b56-8607-4da8-9186-8a933cfe0351 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1061.403839] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.403957] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance f2696c7f-5676-403a-87e0-fb0884866005 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.404124] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Migration e2bf8756-19fa-4126-b4c6-11b9b79ae81d is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1061.404242] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 34aeba05-804e-444c-8e58-69c7721b10b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1061.720726] env[68285]: DEBUG nova.network.neutron [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance_info_cache with network_info: [{"id": "b67172eb-4f98-4870-a433-22f6e238cbf4", "address": "fa:16:3e:69:33:45", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67172eb-4f", "ovs_interfaceid": "b67172eb-4f98-4870-a433-22f6e238cbf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.907321] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.225449] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.410688] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance be47df2a-aee7-4275-9acb-9cf74367f503 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1062.914487] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance bb806297-47c6-45b7-a177-f3300fa1e29a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.422359] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d0b04097-292a-47e7-8f14-199b1650dc2c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1063.744658] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2e6772-1dd4-4f52-bc7a-a9b5de60b0e8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.765809] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance '34aeba05-804e-444c-8e58-69c7721b10b1' progress to 0 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1063.782537] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5248aa3a-2745-7a7f-58e7-db260153fab8/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1063.783654] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb91c366-b055-4eda-81e6-bae5ea73ff50 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.792427] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5248aa3a-2745-7a7f-58e7-db260153fab8/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1063.792771] env[68285]: ERROR oslo_vmware.rw_handles [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5248aa3a-2745-7a7f-58e7-db260153fab8/disk-0.vmdk due to incomplete transfer. [ 1063.793098] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-fc45af93-b31e-48f6-8ba3-425ca70dde5a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.802764] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5248aa3a-2745-7a7f-58e7-db260153fab8/disk-0.vmdk. 
{{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1063.802992] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Uploaded image 6fd33489-77cb-4438-9011-563e08865919 to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1063.805540] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1063.805993] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4aa35ef1-218c-48aa-90bc-783670e2acc8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.812793] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1063.812793] env[68285]: value = "task-2891867" [ 1063.812793] env[68285]: _type = "Task" [ 1063.812793] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.822530] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891867, 'name': Destroy_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.926689] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 9c190abd-23ee-4e8e-8b91-9050847581d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1064.274860] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.275219] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49aeb840-735e-4d97-b463-043173685160 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.282860] env[68285]: DEBUG oslo_vmware.api [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1064.282860] env[68285]: value = "task-2891868" [ 1064.282860] env[68285]: _type = "Task" [ 1064.282860] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.293378] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1064.294028] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance '34aeba05-804e-444c-8e58-69c7721b10b1' progress to 17 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1064.323205] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891867, 'name': Destroy_Task} progress is 33%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.430948] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 3858399e-9fc4-4d60-a9d5-95caefb7bd87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1064.799922] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1064.800230] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1064.800392] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1064.801827] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1064.801827] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1064.801827] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1064.801827] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1064.801827] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1064.801827] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1064.802338] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1064.802338] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1064.811020] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c295c9c0-d853-4235-b7de-7a1e564d1664 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.828609] env[68285]: DEBUG oslo_vmware.api [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1064.828609] env[68285]: value = "task-2891869" [ 1064.828609] env[68285]: _type = "Task" [ 1064.828609] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.832159] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891867, 'name': Destroy_Task, 'duration_secs': 0.573549} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.835123] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Destroyed the VM [ 1064.835482] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1064.835816] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b8698780-b8f1-41ed-9c21-919f5d88e225 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.845382] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1064.845382] env[68285]: value = "task-2891870" [ 1064.845382] env[68285]: _type = "Task" [ 1064.845382] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.855184] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891870, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.934600] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance cbf2a387-8a5a-4400-833b-e04e23ca42f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1065.342075] env[68285]: DEBUG oslo_vmware.api [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891869, 'name': ReconfigVM_Task, 'duration_secs': 0.177122} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.342464] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance '34aeba05-804e-444c-8e58-69c7721b10b1' progress to 33 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1065.353888] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891870, 'name': RemoveSnapshot_Task, 'duration_secs': 0.356944} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.354164] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1065.354440] env[68285]: DEBUG nova.compute.manager [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1065.355200] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4cc32de-e8d9-47dd-bbf1-cd08e9b25acd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.438428] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d025b807-fda4-4aff-beac-0ad6a092fe74 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1065.438428] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Migration cfcece04-fb70-4f24-94cf-cf4bae5a06fb is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1065.438428] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 1f5fe064-0443-4b7f-911a-45d803836eeb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.438428] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1065.438428] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2560MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1065.799718] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6734576b-cea3-419b-bdba-015f4af41743 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.808611] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ccaed7-82f1-4169-b225-e8ae06e9ad52 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.839846] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1aad1b-b1e0-4961-958b-f16c342a449f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.848133] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7414725e-eafe-4a5b-a37c-30a988a0c0e2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.853788] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1065.854192] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1065.854450] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1065.854750] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 
tempest-ServerActionsTestOtherB-158934431-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1065.855009] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1065.855308] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1065.855793] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1065.856064] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1065.856333] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1065.856590] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1065.856916] env[68285]: DEBUG nova.virt.hardware [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1065.863739] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Reconfiguring VM instance instance-0000001e to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1065.866184] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a9f6fea-9e28-4000-a87a-4631b207c4ce {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.879842] env[68285]: INFO nova.compute.manager [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Shelve offloading [ 
1065.891244] env[68285]: DEBUG nova.compute.provider_tree [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1065.893676] env[68285]: DEBUG oslo_vmware.api [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1065.893676] env[68285]: value = "task-2891871" [ 1065.893676] env[68285]: _type = "Task" [ 1065.893676] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.901843] env[68285]: DEBUG oslo_vmware.api [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891871, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.393792] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1066.394693] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1066.397721] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-717a4c4d-b4da-47b0-9f92-d63bf08fc548 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.407973] env[68285]: DEBUG oslo_vmware.api [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891871, 'name': ReconfigVM_Task, 'duration_secs': 0.183247} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.409218] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Reconfigured VM instance instance-0000001e to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1066.409539] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1066.409539] env[68285]: value = "task-2891875" [ 1066.409539] env[68285]: _type = "Task" [ 1066.409539] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.410209] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad441f8a-e7a9-4a55-a63a-58bb9fc02a83 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.436015] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 34aeba05-804e-444c-8e58-69c7721b10b1/34aeba05-804e-444c-8e58-69c7721b10b1.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1066.438918] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-919da000-e8c1-4933-94d0-d494434481ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.457897] env[68285]: DEBUG oslo_vmware.api [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1066.457897] env[68285]: value = "task-2891876" [ 1066.457897] env[68285]: _type = "Task" [ 1066.457897] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.463813] env[68285]: DEBUG oslo_vmware.api [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891876, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.901080] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1066.901397] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.551s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.901685] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 55.731s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.903159] env[68285]: INFO nova.compute.claims [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1066.905749] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.905893] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Cleaning up deleted instances {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1066.922386] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1066.922591] env[68285]: DEBUG nova.compute.manager [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1066.923343] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee71f8a-1254-4ae1-af85-3f6e131a7c25 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.928911] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "refresh_cache-f2696c7f-5676-403a-87e0-fb0884866005" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.929096] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 
tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "refresh_cache-f2696c7f-5676-403a-87e0-fb0884866005" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.929537] env[68285]: DEBUG nova.network.neutron [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1066.965595] env[68285]: DEBUG oslo_vmware.api [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891876, 'name': ReconfigVM_Task, 'duration_secs': 0.291558} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.965817] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 34aeba05-804e-444c-8e58-69c7721b10b1/34aeba05-804e-444c-8e58-69c7721b10b1.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1066.966085] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance '34aeba05-804e-444c-8e58-69c7721b10b1' progress to 50 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1067.421962] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] There are 50 instances to clean {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1067.422334] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 9569d50c-d358-4cc5-a106-32da785e4765] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1067.476102] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b1c77f-7131-4d1e-8873-7f399ac89303 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.495299] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7bed204-c1fc-491b-880c-c2ace7a9a235 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.512240] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance '34aeba05-804e-444c-8e58-69c7721b10b1' progress to 67 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1067.684234] env[68285]: DEBUG nova.network.neutron [None req-1d4c318f-11ba-4035-847e-b381b400e07a 
tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Updating instance_info_cache with network_info: [{"id": "34b304ef-32d9-464e-98cc-8226f6f71037", "address": "fa:16:3e:4c:43:4d", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34b304ef-32", "ovs_interfaceid": "34b304ef-32d9-464e-98cc-8226f6f71037", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.925580] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 32d23c62-23ec-4732-a95d-6ac32805e1b9] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1068.058639] env[68285]: DEBUG nova.network.neutron [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Port b67172eb-4f98-4870-a433-22f6e238cbf4 binding to destination host cpu-1 is already ACTIVE {{(pid=68285) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1068.186707] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "refresh_cache-f2696c7f-5676-403a-87e0-fb0884866005" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.265990] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a118d39e-7959-4bd1-8a74-b3d59d0d4dc7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.275097] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bdb3a3-5df2-4cf3-8af4-ae11ffaf8bd7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.323844] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f33d2b-e1af-4dce-b7dd-9d7143267cee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.332286] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7b7f60-bbdd-4fe5-a9bc-b8877e5b557b 
{{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.347884] env[68285]: DEBUG nova.compute.provider_tree [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1068.433152] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: f1b8808d-c3a1-4be6-b6ec-ed441291e8f2] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1068.526966] env[68285]: DEBUG nova.compute.manager [req-e423ed9f-0596-46ab-8564-42bcbe2af9bd req-ed51cf1c-d726-4732-b03d-7a4a0a1e971e service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Received event network-vif-unplugged-34b304ef-32d9-464e-98cc-8226f6f71037 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1068.527210] env[68285]: DEBUG oslo_concurrency.lockutils [req-e423ed9f-0596-46ab-8564-42bcbe2af9bd req-ed51cf1c-d726-4732-b03d-7a4a0a1e971e service nova] Acquiring lock "f2696c7f-5676-403a-87e0-fb0884866005-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.527447] env[68285]: DEBUG oslo_concurrency.lockutils [req-e423ed9f-0596-46ab-8564-42bcbe2af9bd req-ed51cf1c-d726-4732-b03d-7a4a0a1e971e service nova] Lock "f2696c7f-5676-403a-87e0-fb0884866005-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.527584] env[68285]: DEBUG oslo_concurrency.lockutils [req-e423ed9f-0596-46ab-8564-42bcbe2af9bd req-ed51cf1c-d726-4732-b03d-7a4a0a1e971e service nova] Lock "f2696c7f-5676-403a-87e0-fb0884866005-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.527747] env[68285]: DEBUG nova.compute.manager [req-e423ed9f-0596-46ab-8564-42bcbe2af9bd req-ed51cf1c-d726-4732-b03d-7a4a0a1e971e service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] No waiting events found dispatching network-vif-unplugged-34b304ef-32d9-464e-98cc-8226f6f71037 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1068.527912] env[68285]: WARNING nova.compute.manager [req-e423ed9f-0596-46ab-8564-42bcbe2af9bd req-ed51cf1c-d726-4732-b03d-7a4a0a1e971e service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Received unexpected event network-vif-unplugged-34b304ef-32d9-464e-98cc-8226f6f71037 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1068.581721] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1068.582819] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144cdba8-4232-456e-a80b-4be74f6c8719 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.591843] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1068.592367] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-47fba3d3-b4f3-4e2f-b684-20b96b46d5f5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.886250] env[68285]: DEBUG nova.scheduler.client.report [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 102 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1068.886554] env[68285]: DEBUG nova.compute.provider_tree [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 102 to 103 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1068.886731] env[68285]: DEBUG nova.compute.provider_tree [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1068.925100] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Unregistered the VM {{(pid=68285) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1068.925331] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1068.925580] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleting the datastore file [datastore1] f2696c7f-5676-403a-87e0-fb0884866005 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1068.925915] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-413be224-cd4f-44b8-a755-d99b3e672303 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.933650] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1068.933650] env[68285]: value = "task-2891879" [ 1068.933650] env[68285]: _type = "Task" [ 1068.933650] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.937090] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 7bef3e2a-00ab-480a-aa8c-335635ee5d31] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1068.945098] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891879, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.082298] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "34aeba05-804e-444c-8e58-69c7721b10b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.082577] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "34aeba05-804e-444c-8e58-69c7721b10b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.082763] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "34aeba05-804e-444c-8e58-69c7721b10b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.393871] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.492s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.394431] env[68285]: DEBUG nova.compute.manager [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1069.398028] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 57.894s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.398028] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.399813] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 52.755s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.401517] env[68285]: INFO nova.compute.claims [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1069.424570] env[68285]: INFO nova.scheduler.client.report [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Deleted allocations for instance 64103f25-6411-44be-a60f-b9c276dba331 [ 1069.443568] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 8a848ec8-1ae0-4437-be4f-49219214d11f] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1069.445966] env[68285]: DEBUG oslo_vmware.api [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2891879, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245236} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.446405] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1069.447037] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1069.447037] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1069.468662] env[68285]: INFO nova.scheduler.client.report [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleted allocations for instance f2696c7f-5676-403a-87e0-fb0884866005 [ 1069.906810] env[68285]: DEBUG nova.compute.utils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1069.911243] env[68285]: DEBUG nova.compute.manager [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1069.911501] env[68285]: DEBUG nova.network.neutron [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1069.931579] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3e5d6387-af9f-48b0-b5d0-a089c1c7ae04 tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "64103f25-6411-44be-a60f-b9c276dba331" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 61.987s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.947664] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 14285f6e-10a4-4077-a666-3c8d0cc1b87c] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1069.977366] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.993951] env[68285]: DEBUG nova.policy [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '06dcd563ef9449d388123ea3e53e9ad3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce15d1105bcb418a89ad40e5505757bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1070.124058] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.124262] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.124444] env[68285]: DEBUG nova.network.neutron [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1070.374986] env[68285]: DEBUG 
nova.network.neutron [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Successfully created port: 9aa7f869-5c69-4616-a1c6-b2957527dd68 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1070.415495] env[68285]: DEBUG nova.compute.manager [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1070.450841] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: c690490f-9278-4595-8286-d4fd970bbc39] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1070.559850] env[68285]: DEBUG nova.compute.manager [req-668c0097-0505-47db-9345-a10cfd2a0624 req-ee5e2b2f-621a-4f9f-947e-06c3a1379d3a service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Received event network-changed-34b304ef-32d9-464e-98cc-8226f6f71037 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1070.560086] env[68285]: DEBUG nova.compute.manager [req-668c0097-0505-47db-9345-a10cfd2a0624 req-ee5e2b2f-621a-4f9f-947e-06c3a1379d3a service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Refreshing instance network info cache due to event network-changed-34b304ef-32d9-464e-98cc-8226f6f71037. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1070.560309] env[68285]: DEBUG oslo_concurrency.lockutils [req-668c0097-0505-47db-9345-a10cfd2a0624 req-ee5e2b2f-621a-4f9f-947e-06c3a1379d3a service nova] Acquiring lock "refresh_cache-f2696c7f-5676-403a-87e0-fb0884866005" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.560455] env[68285]: DEBUG oslo_concurrency.lockutils [req-668c0097-0505-47db-9345-a10cfd2a0624 req-ee5e2b2f-621a-4f9f-947e-06c3a1379d3a service nova] Acquired lock "refresh_cache-f2696c7f-5676-403a-87e0-fb0884866005" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.560618] env[68285]: DEBUG nova.network.neutron [req-668c0097-0505-47db-9345-a10cfd2a0624 req-ee5e2b2f-621a-4f9f-947e-06c3a1379d3a service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Refreshing network info cache for port 34b304ef-32d9-464e-98cc-8226f6f71037 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1070.830185] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a42399a-59ad-4da5-991e-5316ccd0e982 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.837961] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3bf56ba-3326-496b-a81a-30a000d81563 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.872149] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04ec1bf-1a8a-43ff-9dbd-54d009479851 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.879969] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827a5da3-1c3a-4620-8fc4-7c71fa796d6d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.892989] env[68285]: DEBUG nova.compute.provider_tree [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.941937] env[68285]: DEBUG nova.network.neutron [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance_info_cache with network_info: [{"id": "b67172eb-4f98-4870-a433-22f6e238cbf4", "address": "fa:16:3e:69:33:45", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67172eb-4f", "ovs_interfaceid": "b67172eb-4f98-4870-a433-22f6e238cbf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.956577] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 8b473550-4a40-48a5-9e1c-7c48df828e61] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1071.222588] env[68285]: DEBUG oslo_concurrency.lockutils [None req-09d12e5b-67c2-4ac0-8064-6030536be7b4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "f2696c7f-5676-403a-87e0-fb0884866005" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.312698] env[68285]: DEBUG nova.network.neutron [req-668c0097-0505-47db-9345-a10cfd2a0624 req-ee5e2b2f-621a-4f9f-947e-06c3a1379d3a service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Updated VIF entry in instance network info cache for port 34b304ef-32d9-464e-98cc-8226f6f71037. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1071.312835] env[68285]: DEBUG nova.network.neutron [req-668c0097-0505-47db-9345-a10cfd2a0624 req-ee5e2b2f-621a-4f9f-947e-06c3a1379d3a service nova] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Updating instance_info_cache with network_info: [{"id": "34b304ef-32d9-464e-98cc-8226f6f71037", "address": "fa:16:3e:4c:43:4d", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": null, "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap34b304ef-32", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.396410] env[68285]: DEBUG nova.scheduler.client.report [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1071.427972] env[68285]: DEBUG nova.compute.manager [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1071.444856] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1071.456929] env[68285]: DEBUG nova.virt.hardware [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1071.456929] env[68285]: DEBUG nova.virt.hardware [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1071.456929] env[68285]: DEBUG nova.virt.hardware [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1071.457370] env[68285]: DEBUG nova.virt.hardware [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1071.457370] env[68285]: DEBUG nova.virt.hardware [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1071.457370] env[68285]: DEBUG nova.virt.hardware [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1071.457670] env[68285]: DEBUG nova.virt.hardware [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1071.457670] env[68285]: DEBUG nova.virt.hardware [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1071.457833] env[68285]: DEBUG nova.virt.hardware [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1071.457992] env[68285]: DEBUG nova.virt.hardware [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1071.458180] env[68285]: DEBUG nova.virt.hardware [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1071.459073] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7204f6e-3bb2-4085-b51a-0833befe3ef8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.462170] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: c0409ef9-bd21-4d42-9992-eb5ff7bbcb3e] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1071.469415] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47eb5e74-64ca-4b68-a7a8-bbed9beb030f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.601893] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "8ebbf943-2cef-4c99-a1c4-b1d213fd9884" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.602144] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "8ebbf943-2cef-4c99-a1c4-b1d213fd9884" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.817694] env[68285]: DEBUG oslo_concurrency.lockutils [req-668c0097-0505-47db-9345-a10cfd2a0624 req-ee5e2b2f-621a-4f9f-947e-06c3a1379d3a service nova] Releasing lock "refresh_cache-f2696c7f-5676-403a-87e0-fb0884866005" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 
1071.904610] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.502s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.904610] env[68285]: DEBUG nova.compute.manager [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1071.907574] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 45.316s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.908236] env[68285]: DEBUG nova.objects.instance [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1071.968216] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: c7ab28c3-a316-4685-b876-a0e7c657ec35] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1071.969845] env[68285]: DEBUG nova.network.neutron [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Successfully updated port: 9aa7f869-5c69-4616-a1c6-b2957527dd68 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1071.985341] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0dfaa60-1fa9-4a63-8939-22303b943374 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.007327] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4616a5-5fba-4d75-9e68-baacccc1fde1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.014675] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance '34aeba05-804e-444c-8e58-69c7721b10b1' progress to 83 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1072.104890] env[68285]: DEBUG nova.compute.manager [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 
8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1072.416442] env[68285]: DEBUG nova.compute.utils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1072.417627] env[68285]: DEBUG nova.compute.manager [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1072.417803] env[68285]: DEBUG nova.network.neutron [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1072.461686] env[68285]: DEBUG nova.policy [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c24b9d2248894d52a699df20175b2692', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2fb202eb50a74c558edb6fdb9dfaf077', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1072.471442] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 2a1cc678-2bb2-403e-b6e8-afdeb8362eac] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1072.475023] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquiring lock "refresh_cache-2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.475023] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquired lock "refresh_cache-2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1072.475023] env[68285]: DEBUG nova.network.neutron [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1072.520598] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-39abaa50-a4c5-4ff9-b566-fe3b5fbb74e9 tempest-ServerActionsTestOtherB-158934431 
tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance '34aeba05-804e-444c-8e58-69c7721b10b1' progress to 100 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1072.588849] env[68285]: DEBUG nova.compute.manager [req-21feaf73-4cfd-4878-ac34-530e8e35bd30 req-773fe7b5-09a7-4f22-a002-46677b015fac service nova] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Received event network-vif-plugged-9aa7f869-5c69-4616-a1c6-b2957527dd68 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1072.589220] env[68285]: DEBUG oslo_concurrency.lockutils [req-21feaf73-4cfd-4878-ac34-530e8e35bd30 req-773fe7b5-09a7-4f22-a002-46677b015fac service nova] Acquiring lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.589525] env[68285]: DEBUG oslo_concurrency.lockutils [req-21feaf73-4cfd-4878-ac34-530e8e35bd30 req-773fe7b5-09a7-4f22-a002-46677b015fac service nova] Lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1072.589758] env[68285]: DEBUG oslo_concurrency.lockutils [req-21feaf73-4cfd-4878-ac34-530e8e35bd30 req-773fe7b5-09a7-4f22-a002-46677b015fac service nova] Lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.590039] env[68285]: DEBUG nova.compute.manager [req-21feaf73-4cfd-4878-ac34-530e8e35bd30 req-773fe7b5-09a7-4f22-a002-46677b015fac service nova] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] No waiting events found dispatching network-vif-plugged-9aa7f869-5c69-4616-a1c6-b2957527dd68 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1072.590268] env[68285]: WARNING nova.compute.manager [req-21feaf73-4cfd-4878-ac34-530e8e35bd30 req-773fe7b5-09a7-4f22-a002-46677b015fac service nova] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Received unexpected event network-vif-plugged-9aa7f869-5c69-4616-a1c6-b2957527dd68 for instance with vm_state building and task_state spawning. [ 1072.590474] env[68285]: DEBUG nova.compute.manager [req-21feaf73-4cfd-4878-ac34-530e8e35bd30 req-773fe7b5-09a7-4f22-a002-46677b015fac service nova] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Received event network-changed-9aa7f869-5c69-4616-a1c6-b2957527dd68 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1072.590669] env[68285]: DEBUG nova.compute.manager [req-21feaf73-4cfd-4878-ac34-530e8e35bd30 req-773fe7b5-09a7-4f22-a002-46677b015fac service nova] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Refreshing instance network info cache due to event network-changed-9aa7f869-5c69-4616-a1c6-b2957527dd68. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1072.590874] env[68285]: DEBUG oslo_concurrency.lockutils [req-21feaf73-4cfd-4878-ac34-530e8e35bd30 req-773fe7b5-09a7-4f22-a002-46677b015fac service nova] Acquiring lock "refresh_cache-2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.628037] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.779859] env[68285]: DEBUG nova.network.neutron [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Successfully created port: 724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1072.919234] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b4274870-b90c-4902-a9e3-d44216a01396 tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.920360] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.025s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1072.921745] env[68285]: INFO nova.compute.claims [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1072.924659] env[68285]: DEBUG nova.compute.manager [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1072.977727] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 7df1a9b4-e363-4e35-a8d5-6b09b671e6a5] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1073.010262] env[68285]: DEBUG nova.network.neutron [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1073.148319] env[68285]: DEBUG nova.network.neutron [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Updating instance_info_cache with network_info: [{"id": "9aa7f869-5c69-4616-a1c6-b2957527dd68", "address": "fa:16:3e:1f:33:15", "network": {"id": "ab5e0ed2-3b2f-4410-9b25-274de374d356", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1040104261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15d1105bcb418a89ad40e5505757bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa7f869-5c", "ovs_interfaceid": "9aa7f869-5c69-4616-a1c6-b2957527dd68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.483100] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: d1b5abfa-fd38-4d17-b75f-5036af841d24] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1073.652043] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Releasing lock "refresh_cache-2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1073.652043] env[68285]: DEBUG nova.compute.manager [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Instance network_info: |[{"id": "9aa7f869-5c69-4616-a1c6-b2957527dd68", "address": "fa:16:3e:1f:33:15", "network": {"id": "ab5e0ed2-3b2f-4410-9b25-274de374d356", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1040104261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15d1105bcb418a89ad40e5505757bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap9aa7f869-5c", "ovs_interfaceid": "9aa7f869-5c69-4616-a1c6-b2957527dd68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1073.652322] env[68285]: DEBUG oslo_concurrency.lockutils [req-21feaf73-4cfd-4878-ac34-530e8e35bd30 req-773fe7b5-09a7-4f22-a002-46677b015fac service nova] Acquired lock "refresh_cache-2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.652463] env[68285]: DEBUG nova.network.neutron [req-21feaf73-4cfd-4878-ac34-530e8e35bd30 req-773fe7b5-09a7-4f22-a002-46677b015fac service nova] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Refreshing network info cache for port 9aa7f869-5c69-4616-a1c6-b2957527dd68 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1073.653671] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:33:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8145bd31-c4a7-4828-8818-d065010c9565', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9aa7f869-5c69-4616-a1c6-b2957527dd68', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1073.661227] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Creating folder: Project (ce15d1105bcb418a89ad40e5505757bb). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1073.664169] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00a652ba-a615-4c57-baff-2782d06ae5e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.675876] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Created folder: Project (ce15d1105bcb418a89ad40e5505757bb) in parent group-v580775. [ 1073.676077] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Creating folder: Instances. Parent ref: group-v580973. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1073.676320] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81abf161-fc10-4d41-a473-21fe28c7cb4b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.686745] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Created folder: Instances in parent group-v580973. 
[ 1073.686745] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1073.686745] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1073.686909] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2410e423-3967-4758-9d15-ceff37f45a3c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.707978] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1073.707978] env[68285]: value = "task-2891884" [ 1073.707978] env[68285]: _type = "Task" [ 1073.707978] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.716283] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891884, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.871738] env[68285]: DEBUG nova.network.neutron [req-21feaf73-4cfd-4878-ac34-530e8e35bd30 req-773fe7b5-09a7-4f22-a002-46677b015fac service nova] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Updated VIF entry in instance network info cache for port 9aa7f869-5c69-4616-a1c6-b2957527dd68. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1073.872100] env[68285]: DEBUG nova.network.neutron [req-21feaf73-4cfd-4878-ac34-530e8e35bd30 req-773fe7b5-09a7-4f22-a002-46677b015fac service nova] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Updating instance_info_cache with network_info: [{"id": "9aa7f869-5c69-4616-a1c6-b2957527dd68", "address": "fa:16:3e:1f:33:15", "network": {"id": "ab5e0ed2-3b2f-4410-9b25-274de374d356", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1040104261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15d1105bcb418a89ad40e5505757bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa7f869-5c", "ovs_interfaceid": "9aa7f869-5c69-4616-a1c6-b2957527dd68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.938764] env[68285]: DEBUG nova.compute.manager [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: 
be47df2a-aee7-4275-9acb-9cf74367f503] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1073.965525] env[68285]: DEBUG nova.virt.hardware [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1073.965770] env[68285]: DEBUG nova.virt.hardware [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1073.965926] env[68285]: DEBUG nova.virt.hardware [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1073.966124] env[68285]: DEBUG nova.virt.hardware [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1073.966275] env[68285]: DEBUG nova.virt.hardware [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1073.966469] env[68285]: DEBUG nova.virt.hardware [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1073.966724] env[68285]: DEBUG nova.virt.hardware [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1073.966904] env[68285]: DEBUG nova.virt.hardware [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1073.967093] env[68285]: DEBUG nova.virt.hardware [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1073.967277] env[68285]: DEBUG nova.virt.hardware [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1073.967458] env[68285]: DEBUG nova.virt.hardware [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1073.968766] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffcec9e-9502-4463-bc71-85052c832713 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.979975] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39b6caa-e9e7-4b65-858f-1b62dc21a024 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.987279] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 5266817c-ce3b-4c96-a3bd-32b631c29b81] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1074.223327] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891884, 'name': CreateVM_Task, 'duration_secs': 0.300553} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.223544] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1074.224773] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.224941] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.225274] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1074.225531] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14673f49-4e9e-4eb8-aeb5-f09106ac3181 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.235975] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Waiting for the task: (returnval){ [ 1074.235975] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5285c0af-0ea3-5a9e-cf06-3c9ee1a271ff" [ 1074.235975] env[68285]: _type = "Task" [ 1074.235975] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.245634] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5285c0af-0ea3-5a9e-cf06-3c9ee1a271ff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.273377] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a377bdb5-cd88-4959-aebf-f9aa82e0aa05 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.280382] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e581eb-8299-4e35-bbd1-f73bd2a0e078 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.310761] env[68285]: DEBUG nova.network.neutron [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Successfully updated port: 724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1074.312477] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65781c0-4f44-49e4-b7d4-c44b9dd46b63 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.325585] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73a1b02-cb60-43ba-b5f1-9ef03bb89587 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.340338] env[68285]: DEBUG nova.compute.provider_tree [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.375302] env[68285]: DEBUG oslo_concurrency.lockutils [req-21feaf73-4cfd-4878-ac34-530e8e35bd30 req-773fe7b5-09a7-4f22-a002-46677b015fac service nova] Releasing lock "refresh_cache-2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.498580] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 940e0328-970d-4f49-a102-d8a00b8c299b] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1074.681714] env[68285]: DEBUG nova.compute.manager [req-aebf525f-28f5-4d1f-ab2c-39f563dc20ff req-6d8cdc4e-ce19-4465-b273-38936dc44dee service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Received event network-vif-plugged-724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1074.681822] env[68285]: DEBUG oslo_concurrency.lockutils [req-aebf525f-28f5-4d1f-ab2c-39f563dc20ff req-6d8cdc4e-ce19-4465-b273-38936dc44dee service nova] Acquiring lock "be47df2a-aee7-4275-9acb-9cf74367f503-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.682019] env[68285]: DEBUG oslo_concurrency.lockutils [req-aebf525f-28f5-4d1f-ab2c-39f563dc20ff req-6d8cdc4e-ce19-4465-b273-38936dc44dee service nova] Lock "be47df2a-aee7-4275-9acb-9cf74367f503-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.682213] env[68285]: DEBUG oslo_concurrency.lockutils [req-aebf525f-28f5-4d1f-ab2c-39f563dc20ff req-6d8cdc4e-ce19-4465-b273-38936dc44dee service nova] Lock "be47df2a-aee7-4275-9acb-9cf74367f503-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.682378] env[68285]: DEBUG nova.compute.manager [req-aebf525f-28f5-4d1f-ab2c-39f563dc20ff req-6d8cdc4e-ce19-4465-b273-38936dc44dee service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] No waiting events found dispatching network-vif-plugged-724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1074.682583] env[68285]: WARNING nova.compute.manager [req-aebf525f-28f5-4d1f-ab2c-39f563dc20ff req-6d8cdc4e-ce19-4465-b273-38936dc44dee service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Received unexpected event network-vif-plugged-724df450-925b-47ae-884b-4935b5b95ab2 for instance with vm_state building and task_state spawning. [ 1074.682695] env[68285]: DEBUG nova.compute.manager [req-aebf525f-28f5-4d1f-ab2c-39f563dc20ff req-6d8cdc4e-ce19-4465-b273-38936dc44dee service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Received event network-changed-724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1074.682845] env[68285]: DEBUG nova.compute.manager [req-aebf525f-28f5-4d1f-ab2c-39f563dc20ff req-6d8cdc4e-ce19-4465-b273-38936dc44dee service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Refreshing instance network info cache due to event network-changed-724df450-925b-47ae-884b-4935b5b95ab2. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1074.683064] env[68285]: DEBUG oslo_concurrency.lockutils [req-aebf525f-28f5-4d1f-ab2c-39f563dc20ff req-6d8cdc4e-ce19-4465-b273-38936dc44dee service nova] Acquiring lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.683172] env[68285]: DEBUG oslo_concurrency.lockutils [req-aebf525f-28f5-4d1f-ab2c-39f563dc20ff req-6d8cdc4e-ce19-4465-b273-38936dc44dee service nova] Acquired lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.683324] env[68285]: DEBUG nova.network.neutron [req-aebf525f-28f5-4d1f-ab2c-39f563dc20ff req-6d8cdc4e-ce19-4465-b273-38936dc44dee service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Refreshing network info cache for port 724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1074.746611] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5285c0af-0ea3-5a9e-cf06-3c9ee1a271ff, 'name': SearchDatastore_Task, 'duration_secs': 0.009932} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.746982] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.747247] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1074.747482] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.747625] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.747803] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1074.748106] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9afd29d-ffd5-4693-9218-8aec02e4f3ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.756970] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1074.757183] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1074.757911] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8a32120-16cd-4c13-85d3-4afddf8079fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.762887] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Waiting for the task: (returnval){ [ 1074.762887] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52bc7206-1b40-0c50-a08b-4ba22303ed06" [ 1074.762887] env[68285]: _type = "Task" [ 1074.762887] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.770592] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bc7206-1b40-0c50-a08b-4ba22303ed06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.819331] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.843889] env[68285]: DEBUG nova.scheduler.client.report [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1075.001383] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 631fe0ee-73a6-48c5-9a14-f6a00d2c2942] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1075.165241] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "34aeba05-804e-444c-8e58-69c7721b10b1" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.165496] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "34aeba05-804e-444c-8e58-69c7721b10b1" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 
0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.165682] env[68285]: DEBUG nova.compute.manager [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Going to confirm migration 3 {{(pid=68285) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1075.215607] env[68285]: DEBUG nova.network.neutron [req-aebf525f-28f5-4d1f-ab2c-39f563dc20ff req-6d8cdc4e-ce19-4465-b273-38936dc44dee service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1075.272345] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bc7206-1b40-0c50-a08b-4ba22303ed06, 'name': SearchDatastore_Task, 'duration_secs': 0.008118} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.273123] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97c3b484-f866-4bdf-afc0-026adbd19929 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.277842] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Waiting for the task: (returnval){ [ 1075.277842] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52440968-e742-304b-37a3-4d7260bd3081" [ 1075.277842] env[68285]: _type = "Task" [ 1075.277842] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.285243] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52440968-e742-304b-37a3-4d7260bd3081, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.297226] env[68285]: DEBUG nova.network.neutron [req-aebf525f-28f5-4d1f-ab2c-39f563dc20ff req-6d8cdc4e-ce19-4465-b273-38936dc44dee service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.348757] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.428s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.349361] env[68285]: DEBUG nova.compute.manager [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1075.352054] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.348s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.352344] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.354307] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.824s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.356106] env[68285]: INFO nova.compute.claims [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1075.383826] env[68285]: INFO nova.scheduler.client.report [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Deleted allocations for instance 1dce61a2-0fe2-4384-835c-7e324446d7cc [ 1075.505440] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 3e656d8d-bd06-4886-9424-4ed76b98aae9] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1075.714156] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 
tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.714370] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1075.714547] env[68285]: DEBUG nova.network.neutron [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1075.714730] env[68285]: DEBUG nova.objects.instance [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'info_cache' on Instance uuid 34aeba05-804e-444c-8e58-69c7721b10b1 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1075.787877] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52440968-e742-304b-37a3-4d7260bd3081, 'name': SearchDatastore_Task, 'duration_secs': 0.009282} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.788190] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.788422] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81/2e5a2839-3cdf-436d-89eb-5d6f83c3bf81.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1075.792022] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55b6be25-5e77-4ca4-9e5c-b69ecd006e86 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.794496] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Waiting for the task: (returnval){ [ 1075.794496] env[68285]: value = "task-2891886" [ 1075.794496] env[68285]: _type = "Task" [ 1075.794496] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.802141] env[68285]: DEBUG oslo_concurrency.lockutils [req-aebf525f-28f5-4d1f-ab2c-39f563dc20ff req-6d8cdc4e-ce19-4465-b273-38936dc44dee service nova] Releasing lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.802559] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891886, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.805131] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquired lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1075.805131] env[68285]: DEBUG nova.network.neutron [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1075.861076] env[68285]: DEBUG nova.compute.utils [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1075.868061] env[68285]: DEBUG nova.compute.manager [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Not allocating networking since 'none' was specified. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1075.892366] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2694b1f8-388e-40b8-984e-5e4a96b82952 tempest-ServerPasswordTestJSON-391053730 tempest-ServerPasswordTestJSON-391053730-project-member] Lock "1dce61a2-0fe2-4384-835c-7e324446d7cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.958s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.008432] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 753bb2f7-bf0a-401e-81af-93982558d3b7] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1076.307143] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891886, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468893} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.307274] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81/2e5a2839-3cdf-436d-89eb-5d6f83c3bf81.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1076.307379] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1076.307891] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e9c684cd-3433-49c6-b24f-ae385d04f247 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.313960] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Waiting for the task: (returnval){ [ 1076.313960] env[68285]: value = "task-2891887" [ 1076.313960] env[68285]: _type = "Task" [ 1076.313960] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.321510] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891887, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.345258] env[68285]: DEBUG nova.network.neutron [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1076.366847] env[68285]: DEBUG nova.compute.manager [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1076.516981] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 324cc3e5-1c81-498e-b520-e9fca26013ef] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1076.580163] env[68285]: DEBUG nova.network.neutron [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Updating instance_info_cache with network_info: [{"id": "724df450-925b-47ae-884b-4935b5b95ab2", "address": "fa:16:3e:0f:59:8c", "network": {"id": "19fe9f45-cb71-4a4f-8a94-0020f8d0e8a7", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-693820438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fb202eb50a74c558edb6fdb9dfaf077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap724df450-92", "ovs_interfaceid": "724df450-925b-47ae-884b-4935b5b95ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.766771] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093cdac6-55bd-4e07-80d9-c19d21e4a486 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.778219] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c539f5ce-f17a-4451-b60a-1192b1191d41 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.814515] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc143597-f13c-4da9-85f1-863122f0ed55 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.828364] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c12b71-3775-4d99-b551-98afaaf82ef4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.832153] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891887, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063952} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.832382] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1076.833666] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbe68ad-a0c4-461a-9b63-45ed8fe5b858 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.845045] env[68285]: DEBUG nova.compute.provider_tree [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1076.873018] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81/2e5a2839-3cdf-436d-89eb-5d6f83c3bf81.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1076.873018] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f68b291-9c27-4ba8-b098-00d3765f5d5d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.898635] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Waiting for the task: (returnval){ [ 1076.898635] env[68285]: value = "task-2891888" [ 1076.898635] env[68285]: _type = "Task" [ 1076.898635] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.905624] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891888, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.023235] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 29981c10-c6dd-4852-94ad-1f8f0135b8cc] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1077.082748] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Releasing lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.083109] env[68285]: DEBUG nova.compute.manager [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Instance network_info: |[{"id": "724df450-925b-47ae-884b-4935b5b95ab2", "address": "fa:16:3e:0f:59:8c", "network": {"id": "19fe9f45-cb71-4a4f-8a94-0020f8d0e8a7", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-693820438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fb202eb50a74c558edb6fdb9dfaf077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap724df450-92", "ovs_interfaceid": "724df450-925b-47ae-884b-4935b5b95ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1077.083524] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:59:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '724df450-925b-47ae-884b-4935b5b95ab2', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1077.092408] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Creating folder: Project (2fb202eb50a74c558edb6fdb9dfaf077). Parent ref: group-v580775. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1077.093594] env[68285]: DEBUG nova.network.neutron [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance_info_cache with network_info: [{"id": "b67172eb-4f98-4870-a433-22f6e238cbf4", "address": "fa:16:3e:69:33:45", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67172eb-4f", "ovs_interfaceid": "b67172eb-4f98-4870-a433-22f6e238cbf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.095187] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b371abcc-90a0-4f7f-9776-f5847a26950c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.105871] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Created folder: Project (2fb202eb50a74c558edb6fdb9dfaf077) in parent group-v580775. [ 1077.106139] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Creating folder: Instances. Parent ref: group-v580977. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1077.106396] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fba5b8aa-7c6e-4b5a-b182-9367bf32e4f3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.120015] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Created folder: Instances in parent group-v580977. [ 1077.120015] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1077.120015] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1077.120015] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a9c47cf-19dc-4f1e-8135-cd7b5c90249a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.137253] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1077.137253] env[68285]: value = "task-2891891" [ 1077.137253] env[68285]: _type = "Task" [ 1077.137253] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.145681] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891891, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.349777] env[68285]: DEBUG nova.scheduler.client.report [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1077.393093] env[68285]: DEBUG nova.compute.manager [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1077.407278] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891888, 'name': ReconfigVM_Task, 'duration_secs': 0.384515} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.407558] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81/2e5a2839-3cdf-436d-89eb-5d6f83c3bf81.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1077.408312] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7cb647a9-a6c6-4b41-af2d-0974c1778408 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.415322] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Waiting for the task: (returnval){ [ 1077.415322] env[68285]: value = "task-2891893" [ 1077.415322] env[68285]: _type = "Task" [ 1077.415322] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.420150] env[68285]: DEBUG nova.virt.hardware [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1077.420498] env[68285]: DEBUG nova.virt.hardware [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1077.420566] env[68285]: DEBUG nova.virt.hardware [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1077.420708] env[68285]: DEBUG nova.virt.hardware [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1077.420874] env[68285]: DEBUG nova.virt.hardware [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 
tempest-ServersListShow296Test-2096148982-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1077.420991] env[68285]: DEBUG nova.virt.hardware [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1077.421212] env[68285]: DEBUG nova.virt.hardware [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1077.421370] env[68285]: DEBUG nova.virt.hardware [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1077.421532] env[68285]: DEBUG nova.virt.hardware [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1077.421689] env[68285]: DEBUG nova.virt.hardware [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1077.421858] env[68285]: DEBUG nova.virt.hardware [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1077.422590] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778e69df-7c51-47c4-9047-7483411743fe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.431553] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891893, 'name': Rename_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.435046] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51545c3-0be7-4ef5-9d34-b3a632f77967 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.448955] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1077.454530] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Creating folder: Project (50add22e3d2145d6b9c2f20123e77bcf). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1077.454816] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0456a3d-b00b-400f-8e33-7596ecb280e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.464387] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Created folder: Project (50add22e3d2145d6b9c2f20123e77bcf) in parent group-v580775. [ 1077.464591] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Creating folder: Instances. Parent ref: group-v580980. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1077.464825] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e141d3d2-8460-48de-8d3e-b20232b2adbc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.472440] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Created folder: Instances in parent group-v580980. [ 1077.472667] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1077.472852] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1077.473057] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf59c1d2-5358-4cc9-af74-24e115ae3f12 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.488948] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1077.488948] env[68285]: value = "task-2891896" [ 1077.488948] env[68285]: _type = "Task" [ 1077.488948] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.496639] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891896, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.525450] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: d2c3e3eb-4b05-4e08-bd08-0f42560fcdba] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1077.597959] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.598355] env[68285]: DEBUG nova.objects.instance [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'migration_context' on Instance uuid 34aeba05-804e-444c-8e58-69c7721b10b1 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1077.651334] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891891, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.854710] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.500s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.855254] env[68285]: DEBUG nova.compute.manager [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1077.857823] env[68285]: DEBUG oslo_concurrency.lockutils [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.278s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.858199] env[68285]: DEBUG oslo_concurrency.lockutils [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.861311] env[68285]: DEBUG oslo_concurrency.lockutils [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.828s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.861311] env[68285]: DEBUG oslo_concurrency.lockutils [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.862460] env[68285]: DEBUG oslo_concurrency.lockutils [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 44.005s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.862460] env[68285]: DEBUG nova.objects.instance [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1077.887054] env[68285]: INFO nova.scheduler.client.report [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleted allocations for instance 3c71f649-b456-45a0-a113-725a529702a2 [ 1077.897388] env[68285]: INFO nova.scheduler.client.report [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Deleted allocations for instance 9e81990d-e63e-48a7-8941-f0298ca184b3 [ 1077.925525] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891893, 'name': Rename_Task, 'duration_secs': 0.159205} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.925788] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1077.926050] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-beb346a0-8e08-4c95-becd-18cd0f7a879e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.932376] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Waiting for the task: (returnval){ [ 1077.932376] env[68285]: value = "task-2891897" [ 1077.932376] env[68285]: _type = "Task" [ 1077.932376] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.940148] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891897, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.998039] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891896, 'name': CreateVM_Task, 'duration_secs': 0.28372} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.998208] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1077.998636] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.998794] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1077.999167] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1077.999417] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c354dbd-5bf5-4701-b9e6-8b1b6bc77a7e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.003741] 
env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1078.003741] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e4285c-83b1-d49d-964a-6018c166fc31" [ 1078.003741] env[68285]: _type = "Task" [ 1078.003741] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.011194] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e4285c-83b1-d49d-964a-6018c166fc31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.029193] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: bda5b2fb-1875-4078-a4c1-f76f6abeaaf5] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1078.100854] env[68285]: DEBUG nova.objects.base [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Object Instance<34aeba05-804e-444c-8e58-69c7721b10b1> lazy-loaded attributes: info_cache,migration_context {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1078.101894] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650a15cf-a6de-4650-b534-152ddc7698ce {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.124224] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bb55cbb-7049-4d1d-8be5-33611af55620 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.130220] env[68285]: DEBUG oslo_vmware.api [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1078.130220] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521651d6-87cc-ffdc-3283-08c746804510" [ 1078.130220] env[68285]: _type = "Task" [ 1078.130220] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.140671] env[68285]: DEBUG oslo_vmware.api [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521651d6-87cc-ffdc-3283-08c746804510, 'name': SearchDatastore_Task, 'duration_secs': 0.006844} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.143713] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.149798] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891891, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.371962] env[68285]: DEBUG nova.compute.utils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1078.373442] env[68285]: DEBUG nova.compute.manager [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1078.373617] env[68285]: DEBUG nova.network.neutron [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1078.399945] env[68285]: DEBUG oslo_concurrency.lockutils [None req-74288414-5eb5-4ffa-a58a-53c96075cf9c tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "3c71f649-b456-45a0-a113-725a529702a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.423s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.405322] env[68285]: DEBUG oslo_concurrency.lockutils [None req-29500154-a3c1-467f-a792-702f8324b91c tempest-ServersListShow2100Test-1271188201 tempest-ServersListShow2100Test-1271188201-project-member] Lock "9e81990d-e63e-48a7-8941-f0298ca184b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.114s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.439347] env[68285]: DEBUG nova.policy [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fd0582abf8e4fff8e6f8316ba430988', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07b5865cc5804d8d98073e5d0c1449aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1078.449834] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 
tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891897, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.520296] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e4285c-83b1-d49d-964a-6018c166fc31, 'name': SearchDatastore_Task, 'duration_secs': 0.01003} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.520614] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.520836] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1078.521083] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.521230] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1078.521411] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1078.521680] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-436dd7f8-c9a5-4b99-93e1-1dbe46012cd6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.531559] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1078.531559] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 
tempest-ServersListShow296Test-2096148982-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1078.531559] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feb2c1d0-91c1-4405-a58f-ead3b0b4062f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.533540] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 437a18da-8fe4-478e-82a0-3b1a9da47df8] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1078.538235] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1078.538235] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]522ad0b6-6a14-a528-a608-1b3580b6babc" [ 1078.538235] env[68285]: _type = "Task" [ 1078.538235] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.546752] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522ad0b6-6a14-a528-a608-1b3580b6babc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.651960] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891891, 'name': CreateVM_Task, 'duration_secs': 1.372501} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.655175] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1078.656363] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.656560] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1078.656840] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1078.657146] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab93bfca-252f-4317-a0f0-10214ec1f120 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.662061] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1078.662061] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521f6eb4-eb85-ea03-b242-2cacd5667796" [ 1078.662061] env[68285]: _type = "Task" [ 1078.662061] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.671118] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521f6eb4-eb85-ea03-b242-2cacd5667796, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.710024] env[68285]: DEBUG oslo_concurrency.lockutils [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "1a040977-b57e-4b67-b259-065b788141de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.710024] env[68285]: DEBUG oslo_concurrency.lockutils [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "1a040977-b57e-4b67-b259-065b788141de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.859491] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "8c299247-896d-4ff1-b73a-22a71ec972fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.859744] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "8c299247-896d-4ff1-b73a-22a71ec972fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.872205] env[68285]: DEBUG nova.network.neutron [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Successfully created port: de764e97-9703-4359-9800-31118f814f1e {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1078.879466] env[68285]: DEBUG oslo_concurrency.lockutils [None req-33bf3c49-5b16-41de-94be-ad837a80d8db tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.881343] env[68285]: DEBUG nova.compute.manager [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1078.887180] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.669s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.887457] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.890105] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.049s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.892435] env[68285]: INFO nova.compute.claims [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1078.924890] env[68285]: INFO nova.scheduler.client.report [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Deleted allocations for instance b3b7f551-81aa-4ac4-9906-020fac5f01f7 [ 1078.944629] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891897, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.040099] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: b0f32ce2-92fd-4290-a2f4-e5658f775f4f] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1079.048836] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522ad0b6-6a14-a528-a608-1b3580b6babc, 'name': SearchDatastore_Task, 'duration_secs': 0.010311} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.049827] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-938b2229-e0fc-41c2-badb-bc6ac07be823 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.058410] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1079.058410] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521b530d-f1db-bc07-1544-497a1b58928a" [ 1079.058410] env[68285]: _type = "Task" [ 1079.058410] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.068737] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521b530d-f1db-bc07-1544-497a1b58928a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.174839] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521f6eb4-eb85-ea03-b242-2cacd5667796, 'name': SearchDatastore_Task, 'duration_secs': 0.009203} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.175038] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.175287] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1079.175503] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.210993] env[68285]: DEBUG nova.compute.manager [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1079.433059] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c58418ce-376b-4328-910c-667011af0980 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "b3b7f551-81aa-4ac4-9906-020fac5f01f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.849s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.446566] env[68285]: DEBUG oslo_vmware.api [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891897, 'name': PowerOnVM_Task, 'duration_secs': 1.265523} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.446822] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1079.447028] env[68285]: INFO nova.compute.manager [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Took 8.02 seconds to spawn the instance on the hypervisor. [ 1079.447250] env[68285]: DEBUG nova.compute.manager [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1079.447999] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266cf9b6-99ed-4d58-991c-66471ff81672 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.540530] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 1b9dd0e2-781f-43d7-a66e-e718a0972c78] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1079.568979] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521b530d-f1db-bc07-1544-497a1b58928a, 'name': SearchDatastore_Task, 'duration_secs': 0.011442} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.569280] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.569538] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] bb806297-47c6-45b7-a177-f3300fa1e29a/bb806297-47c6-45b7-a177-f3300fa1e29a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1079.569809] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.569997] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1079.570238] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2cde8f18-8226-4316-81c7-37cc1eedabbf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.572221] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09480039-91e8-4c06-85fe-82a3a4a75ced {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.578249] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1079.578249] env[68285]: value = "task-2891899" [ 1079.578249] env[68285]: _type = "Task" [ 1079.578249] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.581949] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1079.582153] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1079.583053] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b37cdade-e171-4867-86ca-e9d8d3001e85 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.587689] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891899, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.590319] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1079.590319] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b89794-90bd-d839-4c46-dbee0154ce1d" [ 1079.590319] env[68285]: _type = "Task" [ 1079.590319] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.597104] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b89794-90bd-d839-4c46-dbee0154ce1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.739501] env[68285]: DEBUG oslo_concurrency.lockutils [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.042623] env[68285]: DEBUG nova.compute.manager [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1080.054043] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: a2a7590d-c415-4955-8a25-4b1411449557] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1080.058108] env[68285]: INFO nova.compute.manager [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Took 68.91 seconds to build instance. [ 1080.095745] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891899, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477907} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.097929] env[68285]: DEBUG nova.virt.hardware [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1080.098169] env[68285]: DEBUG nova.virt.hardware [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1080.098326] env[68285]: DEBUG nova.virt.hardware [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1080.098509] env[68285]: DEBUG nova.virt.hardware [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1080.098665] env[68285]: DEBUG nova.virt.hardware [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1080.098801] env[68285]: DEBUG nova.virt.hardware [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1080.099016] env[68285]: DEBUG nova.virt.hardware [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1080.099211] env[68285]: DEBUG nova.virt.hardware [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1080.099391] env[68285]: DEBUG nova.virt.hardware [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 
tempest-ImagesTestJSON-1472763889-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1080.099554] env[68285]: DEBUG nova.virt.hardware [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1080.099725] env[68285]: DEBUG nova.virt.hardware [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1080.105876] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] bb806297-47c6-45b7-a177-f3300fa1e29a/bb806297-47c6-45b7-a177-f3300fa1e29a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1080.105876] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1080.110728] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a903a87-9727-4100-b0a6-1c8ba330d8f9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.110728] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e3214551-9aae-4cf6-861b-50b51ecbf9dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.117436] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b89794-90bd-d839-4c46-dbee0154ce1d, 'name': SearchDatastore_Task, 'duration_secs': 0.008308} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.121099] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1080.121099] env[68285]: value = "task-2891900" [ 1080.121099] env[68285]: _type = "Task" [ 1080.121099] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.121329] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f2b452a-5c31-4e57-9fd1-967c085462df {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.124516] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46f3e46-6fef-4627-b982-98e105397ccc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.142495] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1080.142495] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521afe7f-8fa4-6e3d-83c9-0e56495ff151" [ 1080.142495] env[68285]: _type = "Task" [ 1080.142495] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.142708] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891900, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.161818] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521afe7f-8fa4-6e3d-83c9-0e56495ff151, 'name': SearchDatastore_Task, 'duration_secs': 0.01292} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.162041] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.162349] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] be47df2a-aee7-4275-9acb-9cf74367f503/be47df2a-aee7-4275-9acb-9cf74367f503.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1080.162624] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3a64fd7-cd56-4a5f-804f-56a365311fc6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.169616] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1080.169616] env[68285]: value = "task-2891901" [ 1080.169616] env[68285]: _type = "Task" [ 1080.169616] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.178249] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2891901, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.419827] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c40b5d-116d-468c-be8f-73ad9f3db108 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.427625] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660cb7be-a8a0-4cc9-9c4f-98e4848a78c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.463265] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db2a985-4672-4745-99f6-006411d98f24 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.474093] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869edeb0-d99b-4d8e-a36a-c3c0b5666cdd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.490243] env[68285]: DEBUG nova.compute.provider_tree [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1080.558588] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 1c42043d-f8db-4cb9-8147-48d0d32c982b] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1080.561564] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17692a4a-6de6-4584-a25c-857855d92768 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.422s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.639424] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891900, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090975} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.639545] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1080.640422] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15919919-99f8-46b4-a288-68939f84f7f8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.663716] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] bb806297-47c6-45b7-a177-f3300fa1e29a/bb806297-47c6-45b7-a177-f3300fa1e29a.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1080.664110] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c979d27-b2b0-48c2-be87-fc0c311d6569 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.688516] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2891901, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.690152] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1080.690152] env[68285]: value = "task-2891902" [ 1080.690152] env[68285]: _type = "Task" [ 1080.690152] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.698896] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891902, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.016426] env[68285]: ERROR nova.scheduler.client.report [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [req-d0a99a83-9e83-4f93-9018-0aacb82ff03d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d0a99a83-9e83-4f93-9018-0aacb82ff03d"}]} [ 1081.020304] env[68285]: DEBUG nova.network.neutron [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Successfully updated port: de764e97-9703-4359-9800-31118f814f1e {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1081.042348] env[68285]: DEBUG nova.scheduler.client.report [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1081.060379] env[68285]: DEBUG nova.scheduler.client.report [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1081.060516] env[68285]: DEBUG nova.compute.provider_tree [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1081.062692] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 65f289bb-6e97-47ad-8531-c06a9cce302f] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1081.065580] env[68285]: DEBUG nova.compute.manager [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1081.081096] env[68285]: DEBUG nova.scheduler.client.report [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1081.102758] env[68285]: DEBUG nova.scheduler.client.report [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1081.187019] env[68285]: DEBUG nova.compute.manager [req-4f246945-4fc3-409e-baa0-b445055b03be req-3848d8ec-5deb-4fe7-8b34-d9114087bc91 service nova] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Received event network-vif-plugged-de764e97-9703-4359-9800-31118f814f1e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1081.187256] env[68285]: DEBUG oslo_concurrency.lockutils [req-4f246945-4fc3-409e-baa0-b445055b03be req-3848d8ec-5deb-4fe7-8b34-d9114087bc91 service nova] Acquiring lock "d0b04097-292a-47e7-8f14-199b1650dc2c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.187463] env[68285]: DEBUG oslo_concurrency.lockutils [req-4f246945-4fc3-409e-baa0-b445055b03be req-3848d8ec-5deb-4fe7-8b34-d9114087bc91 service nova] Lock "d0b04097-292a-47e7-8f14-199b1650dc2c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.187630] env[68285]: DEBUG oslo_concurrency.lockutils [req-4f246945-4fc3-409e-baa0-b445055b03be req-3848d8ec-5deb-4fe7-8b34-d9114087bc91 service nova] Lock "d0b04097-292a-47e7-8f14-199b1650dc2c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.187798] env[68285]: DEBUG nova.compute.manager [req-4f246945-4fc3-409e-baa0-b445055b03be req-3848d8ec-5deb-4fe7-8b34-d9114087bc91 service nova] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] No waiting events found dispatching network-vif-plugged-de764e97-9703-4359-9800-31118f814f1e {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1081.187960] env[68285]: WARNING nova.compute.manager [req-4f246945-4fc3-409e-baa0-b445055b03be req-3848d8ec-5deb-4fe7-8b34-d9114087bc91 service nova] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Received unexpected event network-vif-plugged-de764e97-9703-4359-9800-31118f814f1e for instance with vm_state building and task_state spawning. 
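
The ERROR/refresh sequence above (Failed to update inventory ... Got 409 ... "placement.concurrent_update", followed by "Refreshing inventories for resource provider ...") is the Placement API's generation-based optimistic concurrency at work: the report client sends the resource provider generation it last saw, and a 409 means another writer bumped it first, so the client re-reads and retries. The following is a minimal illustrative sketch of that retry pattern against the Placement REST API, not Nova's actual report-client code; PLACEMENT_URL, TOKEN, and the microversion header value are placeholders, not values taken from this log.

# Hedged sketch of the generation-conflict retry loop suggested by the
# entries above. Assumes a reachable Placement endpoint and a valid token.
import requests

PLACEMENT_URL = "http://placement.example/placement"   # placeholder endpoint
TOKEN = "..."                                           # placeholder auth token
HEADERS = {
    "X-Auth-Token": TOKEN,
    "OpenStack-API-Version": "placement 1.28",          # assumed microversion
    "Accept": "application/json",
}

def get_inventories(rp_uuid):
    """Fetch current inventories plus the provider generation."""
    r = requests.get(
        f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
        headers=HEADERS)
    r.raise_for_status()
    # Body looks like: {"resource_provider_generation": N, "inventories": {...}}
    return r.json()

def set_inventories(rp_uuid, inventories, retries=3):
    """PUT new inventories, retrying when the provider generation is stale."""
    for _ in range(retries):
        current = get_inventories(rp_uuid)
        body = {
            "resource_provider_generation":
                current["resource_provider_generation"],
            "inventories": inventories,
        }
        r = requests.put(
            f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
            json=body, headers=HEADERS)
        if r.status_code == 409 and "placement.concurrent_update" in r.text:
            # Another writer updated the provider first; refresh and retry,
            # mirroring the "Refreshing inventories ..." log entries above.
            continue
        r.raise_for_status()
        return r.json()
    raise RuntimeError("inventory update still conflicting after retries")

In the log this resolves itself on the next pass: the refreshed data (DISK_GB max_unit 162 instead of 161) is written into the ProviderTree and the subsequent "Inventory has not changed" entries confirm the provider is back in sync.
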
[ 1081.200487] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2891901, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.676738} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.201552] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] be47df2a-aee7-4275-9acb-9cf74367f503/be47df2a-aee7-4275-9acb-9cf74367f503.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1081.201670] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1081.201933] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9b05d56-fbc6-4150-91ea-d08f5ca3bc84 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.209353] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891902, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.217062] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1081.217062] env[68285]: value = "task-2891903" [ 1081.217062] env[68285]: _type = "Task" [ 1081.217062] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.229048] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2891903, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.477659] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7dd441-1b11-49ee-a4b6-fcfe4bb1475c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.500722] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2d8245-62b7-4dca-ba72-ee40d277dcd7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.536520] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "refresh_cache-d0b04097-292a-47e7-8f14-199b1650dc2c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.536520] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "refresh_cache-d0b04097-292a-47e7-8f14-199b1650dc2c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.536520] env[68285]: DEBUG nova.network.neutron [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1081.538759] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc59c0fd-1173-4240-b705-3356c59b3108 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.548682] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e88b84-e72a-44ec-947b-4a134d1b025e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.567402] env[68285]: DEBUG nova.compute.provider_tree [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1081.569594] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: c8784827-a928-439d-abdf-d82b62a61152] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1081.588476] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.681673] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] 
Acquiring lock "a97df3d2-c182-46d8-95c2-61caccade285" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.681949] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "a97df3d2-c182-46d8-95c2-61caccade285" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.682240] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "a97df3d2-c182-46d8-95c2-61caccade285-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.682465] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "a97df3d2-c182-46d8-95c2-61caccade285-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.682632] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "a97df3d2-c182-46d8-95c2-61caccade285-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.684790] env[68285]: INFO nova.compute.manager [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Terminating instance [ 1081.701576] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891902, 'name': ReconfigVM_Task, 'duration_secs': 0.915245} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.701850] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Reconfigured VM instance instance-00000047 to attach disk [datastore1] bb806297-47c6-45b7-a177-f3300fa1e29a/bb806297-47c6-45b7-a177-f3300fa1e29a.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1081.702494] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da79f8e2-6082-4ac0-bd30-2986a16d68c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.709341] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1081.709341] env[68285]: value = "task-2891905" [ 1081.709341] env[68285]: _type = "Task" [ 1081.709341] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.718825] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891905, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.726511] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2891903, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.1341} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.726768] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1081.727632] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b5b95c-62d1-4a39-848c-1829199a7060 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.750983] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] be47df2a-aee7-4275-9acb-9cf74367f503/be47df2a-aee7-4275-9acb-9cf74367f503.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1081.751376] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c257fe86-1c6f-49e9-a3b7-1c9701bb6bad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.774768] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1081.774768] env[68285]: value = "task-2891906" [ 1081.774768] env[68285]: _type = "Task" [ 1081.774768] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.783982] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2891906, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.853670] env[68285]: DEBUG oslo_concurrency.lockutils [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquiring lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.853670] env[68285]: DEBUG oslo_concurrency.lockutils [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.854045] env[68285]: INFO nova.compute.manager [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Rebooting instance [ 1082.070637] env[68285]: DEBUG nova.network.neutron [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1082.073790] env[68285]: DEBUG nova.scheduler.client.report [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1082.078243] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 7dca07f4-78aa-45e4-954a-c9f4d58e7c84] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1082.188125] env[68285]: DEBUG nova.compute.manager [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1082.188474] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1082.189277] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488e580e-fb51-4024-bd5d-aa1a1d6b204c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.196461] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1082.196697] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba28679b-6134-4fcc-b8d1-c73f249634c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.202863] env[68285]: DEBUG oslo_vmware.api [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 1082.202863] env[68285]: value = "task-2891907" [ 1082.202863] env[68285]: _type = "Task" [ 1082.202863] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.218505] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891905, 'name': Rename_Task, 'duration_secs': 0.216514} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.218777] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1082.219014] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e3b7df09-e69d-46bd-bb94-647de2397177 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.226290] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1082.226290] env[68285]: value = "task-2891908" [ 1082.226290] env[68285]: _type = "Task" [ 1082.226290] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.236728] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891908, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.271476] env[68285]: DEBUG nova.network.neutron [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Updating instance_info_cache with network_info: [{"id": "de764e97-9703-4359-9800-31118f814f1e", "address": "fa:16:3e:ae:1f:f1", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde764e97-97", "ovs_interfaceid": "de764e97-9703-4359-9800-31118f814f1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.284744] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2891906, 'name': ReconfigVM_Task, 'duration_secs': 0.315459} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.285025] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Reconfigured VM instance instance-00000046 to attach disk [datastore1] be47df2a-aee7-4275-9acb-9cf74367f503/be47df2a-aee7-4275-9acb-9cf74367f503.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1082.285666] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38fd261b-6d8c-4524-b2b8-a67e92aac8b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.293536] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1082.293536] env[68285]: value = "task-2891909" [ 1082.293536] env[68285]: _type = "Task" [ 1082.293536] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.307188] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2891909, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.372295] env[68285]: DEBUG oslo_concurrency.lockutils [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquiring lock "refresh_cache-2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.372479] env[68285]: DEBUG oslo_concurrency.lockutils [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquired lock "refresh_cache-2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.372655] env[68285]: DEBUG nova.network.neutron [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1082.582035] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.691s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.582387] env[68285]: DEBUG nova.compute.manager [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1082.585209] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 95f5e902-6385-4602-8458-7d7b2069a9da] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1082.586753] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.607s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.586949] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.589605] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.003s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.589793] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.591615] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.651s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.593762] env[68285]: INFO nova.compute.claims [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1082.616826] env[68285]: INFO nova.scheduler.client.report [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Deleted allocations for instance 5e101d74-7a82-4118-8f4c-7af9a6b0917a [ 1082.619066] env[68285]: INFO nova.scheduler.client.report [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Deleted allocations for instance 2a9b3b56-8607-4da8-9186-8a933cfe0351 [ 1082.713150] env[68285]: DEBUG oslo_vmware.api [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891907, 'name': PowerOffVM_Task, 
'duration_secs': 0.22529} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.713432] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1082.713598] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1082.713845] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72a04e22-b81b-43fb-89bb-c95a7a4a8b50 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.736702] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891908, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.774071] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "refresh_cache-d0b04097-292a-47e7-8f14-199b1650dc2c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.774567] env[68285]: DEBUG nova.compute.manager [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Instance network_info: |[{"id": "de764e97-9703-4359-9800-31118f814f1e", "address": "fa:16:3e:ae:1f:f1", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde764e97-97", "ovs_interfaceid": "de764e97-9703-4359-9800-31118f814f1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1082.775163] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 
tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:1f:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '209639b9-c313-4b35-86dc-dccd744d174a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de764e97-9703-4359-9800-31118f814f1e', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1082.783932] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1082.785275] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1082.786019] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1082.786116] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1082.786227] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Deleting the datastore file [datastore2] a97df3d2-c182-46d8-95c2-61caccade285 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1082.786467] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db8cfc95-e159-4c5b-8561-cfa3feefb7a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.801092] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4b974c8-0c0d-469a-8789-edff305e9489 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.810449] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2891909, 'name': Rename_Task, 'duration_secs': 0.184745} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.812461] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1082.812765] env[68285]: DEBUG oslo_vmware.api [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for the task: (returnval){ [ 1082.812765] env[68285]: value = "task-2891911" [ 1082.812765] env[68285]: _type = "Task" [ 1082.812765] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.812903] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1082.812903] env[68285]: value = "task-2891912" [ 1082.812903] env[68285]: _type = "Task" [ 1082.812903] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.813085] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4cb37c9-b8ff-4070-a768-a59afd89756d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.825108] env[68285]: DEBUG oslo_vmware.api [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891911, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.828994] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891912, 'name': CreateVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.829343] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1082.829343] env[68285]: value = "task-2891913" [ 1082.829343] env[68285]: _type = "Task" [ 1082.829343] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.839360] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2891913, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.099576] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 8bedba57-e7c8-4fa8-b171-f6d74550a31c] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1083.102213] env[68285]: DEBUG nova.compute.utils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1083.107247] env[68285]: DEBUG nova.compute.manager [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1083.107528] env[68285]: DEBUG nova.network.neutron [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1083.126484] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bd149319-9d6b-4691-8c49-b44656fe5cb8 tempest-ServerShowV257Test-289590961 tempest-ServerShowV257Test-289590961-project-member] Lock "2a9b3b56-8607-4da8-9186-8a933cfe0351" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.539s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.131572] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bbae3f3b-e753-43f8-8793-e3f49c902da3 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "5e101d74-7a82-4118-8f4c-7af9a6b0917a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.668s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.158780] env[68285]: DEBUG nova.policy [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb7f978e7fa64e88af5756fca97fce6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4471597d3345443aa28b97acd91847e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1083.238404] env[68285]: DEBUG oslo_vmware.api [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891908, 'name': PowerOnVM_Task, 'duration_secs': 1.008981} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.238757] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1083.239007] env[68285]: INFO nova.compute.manager [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Took 5.85 seconds to spawn the instance on the hypervisor. [ 1083.239272] env[68285]: DEBUG nova.compute.manager [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1083.240111] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e18e04a-0e8b-4af4-ab38-99054d8ffd2f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.330919] env[68285]: DEBUG oslo_vmware.api [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Task: {'id': task-2891911, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.488783} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.338265] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1083.338485] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1083.338671] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1083.338852] env[68285]: INFO nova.compute.manager [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1083.339198] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1083.339313] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891912, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.339579] env[68285]: DEBUG nova.compute.manager [-] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1083.339811] env[68285]: DEBUG nova.network.neutron [-] [instance: a97df3d2-c182-46d8-95c2-61caccade285] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1083.348505] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2891913, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.357984] env[68285]: DEBUG nova.network.neutron [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Updating instance_info_cache with network_info: [{"id": "9aa7f869-5c69-4616-a1c6-b2957527dd68", "address": "fa:16:3e:1f:33:15", "network": {"id": "ab5e0ed2-3b2f-4410-9b25-274de374d356", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1040104261-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15d1105bcb418a89ad40e5505757bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa7f869-5c", "ovs_interfaceid": "9aa7f869-5c69-4616-a1c6-b2957527dd68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.423774] env[68285]: DEBUG nova.compute.manager [req-61ed38f6-8605-4e61-86c5-dcc4e67a481c req-71e599f8-b340-4dd0-8580-25afd5ed538e service nova] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Received event network-changed-de764e97-9703-4359-9800-31118f814f1e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1083.424048] env[68285]: DEBUG nova.compute.manager [req-61ed38f6-8605-4e61-86c5-dcc4e67a481c req-71e599f8-b340-4dd0-8580-25afd5ed538e service nova] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Refreshing instance network info cache due to event network-changed-de764e97-9703-4359-9800-31118f814f1e. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1083.424190] env[68285]: DEBUG oslo_concurrency.lockutils [req-61ed38f6-8605-4e61-86c5-dcc4e67a481c req-71e599f8-b340-4dd0-8580-25afd5ed538e service nova] Acquiring lock "refresh_cache-d0b04097-292a-47e7-8f14-199b1650dc2c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.424402] env[68285]: DEBUG oslo_concurrency.lockutils [req-61ed38f6-8605-4e61-86c5-dcc4e67a481c req-71e599f8-b340-4dd0-8580-25afd5ed538e service nova] Acquired lock "refresh_cache-d0b04097-292a-47e7-8f14-199b1650dc2c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.424576] env[68285]: DEBUG nova.network.neutron [req-61ed38f6-8605-4e61-86c5-dcc4e67a481c req-71e599f8-b340-4dd0-8580-25afd5ed538e service nova] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Refreshing network info cache for port de764e97-9703-4359-9800-31118f814f1e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1083.604642] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: f0145d64-60e4-4ad5-a6ea-6c5d40780df5] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1083.611113] env[68285]: DEBUG nova.compute.manager [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1083.698777] env[68285]: DEBUG nova.network.neutron [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Successfully created port: a9d34554-5a11-451d-b371-8a0cdfc63de6 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1083.762285] env[68285]: INFO nova.compute.manager [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Took 54.89 seconds to build instance. [ 1083.832112] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891912, 'name': CreateVM_Task, 'duration_secs': 0.638981} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.835627] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1083.838626] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.838803] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.839211] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1083.839932] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-523ce85e-31b0-4272-84c0-b45037cd9029 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.844988] env[68285]: DEBUG oslo_vmware.api [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2891913, 'name': PowerOnVM_Task, 'duration_secs': 0.621598} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.845623] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1083.845851] env[68285]: INFO nova.compute.manager [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Took 9.91 seconds to spawn the instance on the hypervisor. 
[ 1083.846051] env[68285]: DEBUG nova.compute.manager [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1083.846798] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c35c544-8e13-4c35-819b-7caeca31cd6b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.850566] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1083.850566] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5221e620-e3f3-c9c4-adea-6afa438d239d" [ 1083.850566] env[68285]: _type = "Task" [ 1083.850566] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.865416] env[68285]: DEBUG oslo_concurrency.lockutils [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Releasing lock "refresh_cache-2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.869081] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5221e620-e3f3-c9c4-adea-6afa438d239d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.020246] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff2c470-848d-4006-b0fe-e2483df6cc42 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.028637] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e823150e-18ef-42a4-87ca-7ae2155e4614 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.064321] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cddda209-97af-46a2-91a0-776641abc388 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.067857] env[68285]: DEBUG nova.compute.manager [req-2603c05d-3a35-4f8f-b247-c15ab06f7943 req-b3e9ab28-a8fd-48f0-a933-265b30a1d6d1 service nova] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Received event network-vif-deleted-84cbe58d-a7c4-4c42-9f87-9a6b62805b10 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1084.069355] env[68285]: INFO nova.compute.manager [req-2603c05d-3a35-4f8f-b247-c15ab06f7943 req-b3e9ab28-a8fd-48f0-a933-265b30a1d6d1 service nova] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Neutron deleted interface 84cbe58d-a7c4-4c42-9f87-9a6b62805b10; detaching it from the instance and deleting it from the info cache [ 1084.069355] env[68285]: DEBUG nova.network.neutron [req-2603c05d-3a35-4f8f-b247-c15ab06f7943 req-b3e9ab28-a8fd-48f0-a933-265b30a1d6d1 service nova] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.080475] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751bbb07-957b-46a1-8d53-f5e7ac9a853a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.095853] env[68285]: DEBUG nova.compute.provider_tree [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.111530] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 0d13cc84-bbf2-4e8b-8344-d69acac6bd35] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1084.267446] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bb949fb0-e377-4537-b29b-77e1c9526d79 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Lock "bb806297-47c6-45b7-a177-f3300fa1e29a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.407s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.277967] env[68285]: DEBUG nova.network.neutron [req-61ed38f6-8605-4e61-86c5-dcc4e67a481c req-71e599f8-b340-4dd0-8580-25afd5ed538e service nova] [instance: 
d0b04097-292a-47e7-8f14-199b1650dc2c] Updated VIF entry in instance network info cache for port de764e97-9703-4359-9800-31118f814f1e. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1084.278421] env[68285]: DEBUG nova.network.neutron [req-61ed38f6-8605-4e61-86c5-dcc4e67a481c req-71e599f8-b340-4dd0-8580-25afd5ed538e service nova] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Updating instance_info_cache with network_info: [{"id": "de764e97-9703-4359-9800-31118f814f1e", "address": "fa:16:3e:ae:1f:f1", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde764e97-97", "ovs_interfaceid": "de764e97-9703-4359-9800-31118f814f1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.360383] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5221e620-e3f3-c9c4-adea-6afa438d239d, 'name': SearchDatastore_Task, 'duration_secs': 0.021397} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.360697] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.360927] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.361178] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.361327] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.361520] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.361795] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c74d271-ff9b-4fbb-81bd-1f785b22acdd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.368150] env[68285]: DEBUG nova.compute.manager [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1084.368150] env[68285]: DEBUG nova.network.neutron [-] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.369352] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6897b399-63df-4b19-9bf2-62efdb092334 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.380018] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.380018] env[68285]: 
DEBUG nova.virt.vmwareapi.vmops [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1084.380985] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-004dac8b-4157-47b0-87fd-6b345a520878 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.385464] env[68285]: INFO nova.compute.manager [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Took 67.76 seconds to build instance. [ 1084.392773] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1084.392773] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f03002-e3b7-14fc-aee0-bd908e36c3cf" [ 1084.392773] env[68285]: _type = "Task" [ 1084.392773] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.404182] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f03002-e3b7-14fc-aee0-bd908e36c3cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.571382] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57a08010-774d-4492-b975-fbaef05f2a50 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.584590] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759cc8c2-d17f-4838-a955-3de1c5b06fbe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.599050] env[68285]: DEBUG nova.scheduler.client.report [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1084.617217] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: ee45231a-80f2-49b9-8bc7-03a0c920a668] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1084.619068] env[68285]: DEBUG nova.compute.manager [req-2603c05d-3a35-4f8f-b247-c15ab06f7943 req-b3e9ab28-a8fd-48f0-a933-265b30a1d6d1 service nova] [instance: 
a97df3d2-c182-46d8-95c2-61caccade285] Detach interface failed, port_id=84cbe58d-a7c4-4c42-9f87-9a6b62805b10, reason: Instance a97df3d2-c182-46d8-95c2-61caccade285 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1084.620869] env[68285]: DEBUG nova.compute.manager [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1084.644080] env[68285]: DEBUG nova.virt.hardware [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1084.644329] env[68285]: DEBUG nova.virt.hardware [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1084.644488] env[68285]: DEBUG nova.virt.hardware [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1084.644670] env[68285]: DEBUG nova.virt.hardware [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1084.644813] env[68285]: DEBUG nova.virt.hardware [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1084.644958] env[68285]: DEBUG nova.virt.hardware [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1084.645218] env[68285]: DEBUG nova.virt.hardware [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 
tempest-ServerActionsTestOtherA-1742002793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1084.645381] env[68285]: DEBUG nova.virt.hardware [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1084.645544] env[68285]: DEBUG nova.virt.hardware [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1084.645703] env[68285]: DEBUG nova.virt.hardware [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1084.645873] env[68285]: DEBUG nova.virt.hardware [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1084.646729] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16bde9d8-7922-4e9c-872d-69e17395505a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.655207] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e1bae7-a44a-4212-9fa0-0e9f219af606 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.781224] env[68285]: DEBUG oslo_concurrency.lockutils [req-61ed38f6-8605-4e61-86c5-dcc4e67a481c req-71e599f8-b340-4dd0-8580-25afd5ed538e service nova] Releasing lock "refresh_cache-d0b04097-292a-47e7-8f14-199b1650dc2c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.870095] env[68285]: INFO nova.compute.manager [-] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Took 1.53 seconds to deallocate network for instance. [ 1084.888893] env[68285]: DEBUG oslo_concurrency.lockutils [None req-461a4285-090e-4559-ad1c-213104754202 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "be47df2a-aee7-4275-9acb-9cf74367f503" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.264s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.908286] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f03002-e3b7-14fc-aee0-bd908e36c3cf, 'name': SearchDatastore_Task, 'duration_secs': 0.015813} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.908286] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-548f4a80-1ffd-4ff9-8c06-ddd1c671b90c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.916333] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1084.916333] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5215889f-0443-dca2-f9fa-143560cc2cb0" [ 1084.916333] env[68285]: _type = "Task" [ 1084.916333] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.925372] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5215889f-0443-dca2-f9fa-143560cc2cb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.105530] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.106096] env[68285]: DEBUG nova.compute.manager [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1085.108666] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.387s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.110103] env[68285]: INFO nova.compute.claims [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1085.124057] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 81fe4854-1094-4c42-9df5-05325d961146] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1085.197963] env[68285]: INFO nova.compute.manager [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Rebuilding instance [ 1085.243781] env[68285]: DEBUG nova.compute.manager [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1085.244698] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64788a0-c16c-4083-8148-647ac944e9d3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.378012] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.402552] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467f2ad3-daf8-4aec-8fc8-cd95d50785df {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.412309] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Doing hard reboot of VM {{(pid=68285) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1085.413018] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-77bc6d15-2817-4ff6-ad0d-d0409b9dab70 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.424549] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': 
session[52410409-0226-2549-941e-c989b8ec60bd]5215889f-0443-dca2-f9fa-143560cc2cb0, 'name': SearchDatastore_Task, 'duration_secs': 0.018637} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.428069] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.428069] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] d0b04097-292a-47e7-8f14-199b1650dc2c/d0b04097-292a-47e7-8f14-199b1650dc2c.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1085.428069] env[68285]: DEBUG oslo_vmware.api [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Waiting for the task: (returnval){ [ 1085.428069] env[68285]: value = "task-2891914" [ 1085.428069] env[68285]: _type = "Task" [ 1085.428069] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.428069] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84b858d1-35e1-4073-bd73-b29787983117 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.436829] env[68285]: DEBUG oslo_vmware.api [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891914, 'name': ResetVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.441285] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1085.441285] env[68285]: value = "task-2891915" [ 1085.441285] env[68285]: _type = "Task" [ 1085.441285] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.451627] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891915, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.490214] env[68285]: DEBUG nova.network.neutron [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Successfully updated port: a9d34554-5a11-451d-b371-8a0cdfc63de6 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1085.614504] env[68285]: DEBUG nova.compute.utils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1085.618681] env[68285]: DEBUG nova.compute.manager [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1085.618681] env[68285]: DEBUG nova.network.neutron [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1085.627851] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: e3b01f87-6a4c-4127-9204-2bfa5ff28f38] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1085.662691] env[68285]: DEBUG nova.policy [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '184360cab7224b9eaef80dfe89d0208b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '288595d9298e43fa859bc6b68054aa08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1085.726385] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.726869] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.728435] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f 
tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Acquiring lock "94652533-8c34-42fa-8d70-4effc307ec71" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.728708] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Lock "94652533-8c34-42fa-8d70-4effc307ec71" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.940536] env[68285]: DEBUG oslo_vmware.api [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891914, 'name': ResetVM_Task, 'duration_secs': 0.105557} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.944011] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Did hard reboot of VM {{(pid=68285) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1085.944393] env[68285]: DEBUG nova.compute.manager [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1085.945296] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb43390-952e-45fd-9ac2-9479fc13a766 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.955540] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891915, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.966717] env[68285]: DEBUG nova.network.neutron [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Successfully created port: b1736f52-bada-4b08-820b-ac312cd00b5b {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1085.994068] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "refresh_cache-9c190abd-23ee-4e8e-8b91-9050847581d5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.994068] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "refresh_cache-9c190abd-23ee-4e8e-8b91-9050847581d5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.994068] env[68285]: DEBUG nova.network.neutron [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1086.122026] env[68285]: DEBUG nova.compute.manager [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1086.135237] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 9f4b2b94-ec19-4a8e-8663-ab71c417d093] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1086.233863] env[68285]: DEBUG nova.compute.manager [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1086.235725] env[68285]: DEBUG nova.compute.manager [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1086.260403] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1086.261135] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d92e2fe3-58fc-4a34-a8b0-d298238e73a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.270022] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1086.270022] env[68285]: value = "task-2891916" [ 1086.270022] env[68285]: _type = "Task" [ 1086.270022] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.280026] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891916, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.341519] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.341802] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.435141] env[68285]: DEBUG nova.compute.manager [req-dfb81af9-1643-4ed9-b4f4-d67d0fc1ebaf req-bb918adf-e683-4e2e-8639-27bbe5059f41 service nova] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Received event network-vif-plugged-a9d34554-5a11-451d-b371-8a0cdfc63de6 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1086.435407] env[68285]: DEBUG oslo_concurrency.lockutils [req-dfb81af9-1643-4ed9-b4f4-d67d0fc1ebaf req-bb918adf-e683-4e2e-8639-27bbe5059f41 service nova] Acquiring lock "9c190abd-23ee-4e8e-8b91-9050847581d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.435579] env[68285]: DEBUG oslo_concurrency.lockutils [req-dfb81af9-1643-4ed9-b4f4-d67d0fc1ebaf req-bb918adf-e683-4e2e-8639-27bbe5059f41 service nova] Lock "9c190abd-23ee-4e8e-8b91-9050847581d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.435780] env[68285]: DEBUG oslo_concurrency.lockutils [req-dfb81af9-1643-4ed9-b4f4-d67d0fc1ebaf req-bb918adf-e683-4e2e-8639-27bbe5059f41 service nova] Lock "9c190abd-23ee-4e8e-8b91-9050847581d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.436021] env[68285]: DEBUG nova.compute.manager [req-dfb81af9-1643-4ed9-b4f4-d67d0fc1ebaf req-bb918adf-e683-4e2e-8639-27bbe5059f41 service nova] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] No waiting events found dispatching network-vif-plugged-a9d34554-5a11-451d-b371-8a0cdfc63de6 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1086.436472] env[68285]: WARNING nova.compute.manager [req-dfb81af9-1643-4ed9-b4f4-d67d0fc1ebaf req-bb918adf-e683-4e2e-8639-27bbe5059f41 service nova] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Received unexpected event network-vif-plugged-a9d34554-5a11-451d-b371-8a0cdfc63de6 for instance with vm_state building and task_state spawning. [ 1086.436697] env[68285]: DEBUG nova.compute.manager [req-dfb81af9-1643-4ed9-b4f4-d67d0fc1ebaf req-bb918adf-e683-4e2e-8639-27bbe5059f41 service nova] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Received event network-changed-a9d34554-5a11-451d-b371-8a0cdfc63de6 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1086.436927] env[68285]: DEBUG nova.compute.manager [req-dfb81af9-1643-4ed9-b4f4-d67d0fc1ebaf req-bb918adf-e683-4e2e-8639-27bbe5059f41 service nova] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Refreshing instance network info cache due to event network-changed-a9d34554-5a11-451d-b371-8a0cdfc63de6. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1086.437131] env[68285]: DEBUG oslo_concurrency.lockutils [req-dfb81af9-1643-4ed9-b4f4-d67d0fc1ebaf req-bb918adf-e683-4e2e-8639-27bbe5059f41 service nova] Acquiring lock "refresh_cache-9c190abd-23ee-4e8e-8b91-9050847581d5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.450874] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891915, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.670635} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.451337] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] d0b04097-292a-47e7-8f14-199b1650dc2c/d0b04097-292a-47e7-8f14-199b1650dc2c.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1086.451556] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1086.454374] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-168e940f-3ebd-4380-8542-6ee6fbc358b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.463215] env[68285]: DEBUG oslo_concurrency.lockutils [None req-be3677c6-1d1b-4ef2-868a-775c31c262a0 tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.609s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.465781] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1086.465781] env[68285]: value = "task-2891917" [ 1086.465781] env[68285]: _type = "Task" [ 1086.465781] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.479935] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891917, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.531446] env[68285]: DEBUG nova.network.neutron [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1086.556569] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88022e94-5e1a-4bec-9f91-e96241292e80 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.564936] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc7438c-7a13-4ff3-9d1c-bdcbc9c74d1e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.605195] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc6975c-0801-47db-bf26-902e5c7c3705 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.613273] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdaf4c1-d26e-43ab-869a-996f10e02aaf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.635170] env[68285]: DEBUG nova.compute.provider_tree [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1086.642511] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 87582063-50f9-4518-ad2d-915c9cd49b19] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1086.718260] env[68285]: DEBUG nova.network.neutron [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Updating instance_info_cache with network_info: [{"id": "a9d34554-5a11-451d-b371-8a0cdfc63de6", "address": "fa:16:3e:d1:af:88", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9d34554-5a", "ovs_interfaceid": "a9d34554-5a11-451d-b371-8a0cdfc63de6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.752848] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f 
tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.754157] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.778683] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891916, 'name': PowerOffVM_Task, 'duration_secs': 0.220822} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.779347] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1086.779635] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1086.780410] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9f12ad-54e7-4466-b852-95fc97ed0784 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.786702] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1086.786933] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a3d3b74-a347-4823-b801-0322ecb40c6c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.813649] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1086.813882] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1086.814077] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Deleting the datastore file [datastore1] bb806297-47c6-45b7-a177-f3300fa1e29a {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1086.814339] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c15d8a5e-5917-4fbb-b514-e66299868368 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.820540] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1086.820540] env[68285]: value = "task-2891919" [ 1086.820540] env[68285]: _type = "Task" [ 1086.820540] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.828286] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891919, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.977916] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891917, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072253} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.978204] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1086.978966] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25abf544-8572-4ea0-ae45-8d307551cc47 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.000332] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] d0b04097-292a-47e7-8f14-199b1650dc2c/d0b04097-292a-47e7-8f14-199b1650dc2c.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1087.001414] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-307f7744-541d-4a6c-9354-1b2ea0bc70a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.023953] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1087.023953] env[68285]: value = "task-2891920" [ 1087.023953] env[68285]: _type = "Task" [ 1087.023953] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.034344] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891920, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.144427] env[68285]: DEBUG nova.compute.manager [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1087.147428] env[68285]: DEBUG nova.scheduler.client.report [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1087.150887] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 12fad42a-1011-4563-b11f-7b141b2a1670] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1087.173791] env[68285]: DEBUG nova.virt.hardware [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1087.174045] env[68285]: DEBUG nova.virt.hardware [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1087.174207] env[68285]: DEBUG nova.virt.hardware [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1087.174392] env[68285]: DEBUG nova.virt.hardware 
[None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1087.174538] env[68285]: DEBUG nova.virt.hardware [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1087.174683] env[68285]: DEBUG nova.virt.hardware [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1087.174885] env[68285]: DEBUG nova.virt.hardware [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1087.175057] env[68285]: DEBUG nova.virt.hardware [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1087.175227] env[68285]: DEBUG nova.virt.hardware [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1087.175388] env[68285]: DEBUG nova.virt.hardware [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1087.175558] env[68285]: DEBUG nova.virt.hardware [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1087.176993] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e558a84-86b7-4ca6-afc8-2acfaf49607f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.185760] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13857cb1-aab3-40db-b213-d6644a4b5412 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.206526] env[68285]: DEBUG oslo_concurrency.lockutils [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquiring lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1087.206753] env[68285]: DEBUG oslo_concurrency.lockutils [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.206948] env[68285]: DEBUG oslo_concurrency.lockutils [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquiring lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.207145] env[68285]: DEBUG oslo_concurrency.lockutils [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.207345] env[68285]: DEBUG oslo_concurrency.lockutils [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.209339] env[68285]: INFO nova.compute.manager [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Terminating instance [ 1087.220711] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "refresh_cache-9c190abd-23ee-4e8e-8b91-9050847581d5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.220996] env[68285]: DEBUG nova.compute.manager [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Instance network_info: |[{"id": "a9d34554-5a11-451d-b371-8a0cdfc63de6", "address": "fa:16:3e:d1:af:88", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9d34554-5a", "ovs_interfaceid": "a9d34554-5a11-451d-b371-8a0cdfc63de6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1087.221549] env[68285]: DEBUG oslo_concurrency.lockutils [req-dfb81af9-1643-4ed9-b4f4-d67d0fc1ebaf req-bb918adf-e683-4e2e-8639-27bbe5059f41 service nova] Acquired lock "refresh_cache-9c190abd-23ee-4e8e-8b91-9050847581d5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.221736] env[68285]: DEBUG nova.network.neutron [req-dfb81af9-1643-4ed9-b4f4-d67d0fc1ebaf req-bb918adf-e683-4e2e-8639-27bbe5059f41 service nova] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Refreshing network info cache for port a9d34554-5a11-451d-b371-8a0cdfc63de6 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1087.222684] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:af:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9d34554-5a11-451d-b371-8a0cdfc63de6', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1087.231181] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1087.232481] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1087.232733] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5cddaf37-1685-46cb-8779-be30867af0f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.254031] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1087.254031] env[68285]: value = "task-2891921" [ 1087.254031] env[68285]: _type = "Task" [ 1087.254031] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.266266] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891921, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.331378] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891919, 'name': DeleteDatastoreFile_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.535930] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891920, 'name': ReconfigVM_Task, 'duration_secs': 0.510061} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.536273] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Reconfigured VM instance instance-00000048 to attach disk [datastore2] d0b04097-292a-47e7-8f14-199b1650dc2c/d0b04097-292a-47e7-8f14-199b1650dc2c.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1087.536932] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29f1b2f3-1bc2-4eda-b123-10d380e7f4d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.544143] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1087.544143] env[68285]: value = "task-2891922" [ 1087.544143] env[68285]: _type = "Task" [ 1087.544143] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.551520] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891922, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.653749] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.545s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.654283] env[68285]: DEBUG nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1087.656900] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 5b58896c-cb07-48c8-ace0-385486a3e19d] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1087.658656] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.893s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.660076] env[68285]: INFO nova.compute.claims [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1087.713666] env[68285]: DEBUG nova.compute.manager [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1087.713894] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1087.714947] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d517ef5-2231-43f4-9af6-1f2678edac1e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.724318] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1087.724318] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9fb96c61-1617-4f22-b322-807eefa90fba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.731718] env[68285]: DEBUG oslo_vmware.api [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Waiting for the task: (returnval){ [ 1087.731718] env[68285]: value = "task-2891923" [ 1087.731718] env[68285]: _type = "Task" [ 1087.731718] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.735765] env[68285]: DEBUG nova.network.neutron [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Successfully updated port: b1736f52-bada-4b08-820b-ac312cd00b5b {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1087.750905] env[68285]: DEBUG oslo_vmware.api [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891923, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.763841] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891921, 'name': CreateVM_Task, 'duration_secs': 0.386199} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.763841] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1087.764510] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.764676] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.765032] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1087.765308] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a814155-94fe-41b5-a078-c4905815c794 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.769856] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1087.769856] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]522820c9-5829-9be8-765a-92bb4c00f26a" [ 1087.769856] env[68285]: _type = "Task" [ 1087.769856] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.779507] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522820c9-5829-9be8-765a-92bb4c00f26a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.832540] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891919, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.510066} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.832796] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1087.832977] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1087.833171] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1087.949881] env[68285]: DEBUG nova.network.neutron [req-dfb81af9-1643-4ed9-b4f4-d67d0fc1ebaf req-bb918adf-e683-4e2e-8639-27bbe5059f41 service nova] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Updated VIF entry in instance network info cache for port a9d34554-5a11-451d-b371-8a0cdfc63de6. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1087.950315] env[68285]: DEBUG nova.network.neutron [req-dfb81af9-1643-4ed9-b4f4-d67d0fc1ebaf req-bb918adf-e683-4e2e-8639-27bbe5059f41 service nova] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Updating instance_info_cache with network_info: [{"id": "a9d34554-5a11-451d-b371-8a0cdfc63de6", "address": "fa:16:3e:d1:af:88", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9d34554-5a", "ovs_interfaceid": "a9d34554-5a11-451d-b371-8a0cdfc63de6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.054340] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891922, 'name': Rename_Task, 'duration_secs': 0.219278} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.054631] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1088.054859] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f65a0eba-bf7c-4247-9aa5-07be54773c5b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.062027] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1088.062027] env[68285]: value = "task-2891924" [ 1088.062027] env[68285]: _type = "Task" [ 1088.062027] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.070169] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891924, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.161297] env[68285]: DEBUG nova.compute.utils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1088.163456] env[68285]: DEBUG nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1088.163605] env[68285]: DEBUG nova.network.neutron [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1088.165757] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 11de7da5-1d73-4536-b2a1-f7dbbdec14b8] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1088.208738] env[68285]: DEBUG nova.policy [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '48401b0b09a2477db2a87df4835c70a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b390e5b4080a4984a3f935e9e6a0dd2a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1088.242260] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "refresh_cache-3858399e-9fc4-4d60-a9d5-95caefb7bd87" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.242404] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "refresh_cache-3858399e-9fc4-4d60-a9d5-95caefb7bd87" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.242549] env[68285]: DEBUG nova.network.neutron [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1088.243651] env[68285]: DEBUG oslo_vmware.api [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891923, 'name': PowerOffVM_Task, 'duration_secs': 0.181417} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.243862] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1088.244065] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1088.244275] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7634858f-07e4-4b41-9815-78caca7dc86a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.282276] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522820c9-5829-9be8-765a-92bb4c00f26a, 'name': SearchDatastore_Task, 'duration_secs': 0.020309} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.282588] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.282821] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1088.283115] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.283273] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.283456] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1088.283715] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e2a1ecc-e3f7-4200-a11b-1034cee7d67d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.292675] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1088.292891] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1088.293823] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e2c8ee4-f0da-4f9a-bb0a-5aff23886307 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.299734] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1088.299734] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ee5548-390a-9e35-4301-cdaeef52557a" [ 1088.299734] env[68285]: _type = "Task" [ 1088.299734] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.309921] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1088.310472] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1088.310472] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Deleting the datastore file [datastore1] 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1088.312959] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb0d246e-e5c7-407c-85f6-579268e41060 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.314737] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ee5548-390a-9e35-4301-cdaeef52557a, 'name': 
SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.319540] env[68285]: DEBUG oslo_vmware.api [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Waiting for the task: (returnval){ [ 1088.319540] env[68285]: value = "task-2891926" [ 1088.319540] env[68285]: _type = "Task" [ 1088.319540] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.327108] env[68285]: DEBUG oslo_vmware.api [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891926, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.453842] env[68285]: DEBUG oslo_concurrency.lockutils [req-dfb81af9-1643-4ed9-b4f4-d67d0fc1ebaf req-bb918adf-e683-4e2e-8639-27bbe5059f41 service nova] Releasing lock "refresh_cache-9c190abd-23ee-4e8e-8b91-9050847581d5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.464480] env[68285]: DEBUG nova.compute.manager [req-05d5adfd-714e-473e-80e7-875a7236a0d9 req-b03bc4b8-8fb9-41d5-a4f2-5eeb9cf25413 service nova] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Received event network-vif-plugged-b1736f52-bada-4b08-820b-ac312cd00b5b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1088.464729] env[68285]: DEBUG oslo_concurrency.lockutils [req-05d5adfd-714e-473e-80e7-875a7236a0d9 req-b03bc4b8-8fb9-41d5-a4f2-5eeb9cf25413 service nova] Acquiring lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.464939] env[68285]: DEBUG oslo_concurrency.lockutils [req-05d5adfd-714e-473e-80e7-875a7236a0d9 req-b03bc4b8-8fb9-41d5-a4f2-5eeb9cf25413 service nova] Lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.465118] env[68285]: DEBUG oslo_concurrency.lockutils [req-05d5adfd-714e-473e-80e7-875a7236a0d9 req-b03bc4b8-8fb9-41d5-a4f2-5eeb9cf25413 service nova] Lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.465285] env[68285]: DEBUG nova.compute.manager [req-05d5adfd-714e-473e-80e7-875a7236a0d9 req-b03bc4b8-8fb9-41d5-a4f2-5eeb9cf25413 service nova] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] No waiting events found dispatching network-vif-plugged-b1736f52-bada-4b08-820b-ac312cd00b5b {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1088.465607] env[68285]: WARNING nova.compute.manager [req-05d5adfd-714e-473e-80e7-875a7236a0d9 req-b03bc4b8-8fb9-41d5-a4f2-5eeb9cf25413 service nova] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Received unexpected event network-vif-plugged-b1736f52-bada-4b08-820b-ac312cd00b5b for 
instance with vm_state building and task_state spawning. [ 1088.465607] env[68285]: DEBUG nova.compute.manager [req-05d5adfd-714e-473e-80e7-875a7236a0d9 req-b03bc4b8-8fb9-41d5-a4f2-5eeb9cf25413 service nova] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Received event network-changed-b1736f52-bada-4b08-820b-ac312cd00b5b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1088.465739] env[68285]: DEBUG nova.compute.manager [req-05d5adfd-714e-473e-80e7-875a7236a0d9 req-b03bc4b8-8fb9-41d5-a4f2-5eeb9cf25413 service nova] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Refreshing instance network info cache due to event network-changed-b1736f52-bada-4b08-820b-ac312cd00b5b. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1088.465902] env[68285]: DEBUG oslo_concurrency.lockutils [req-05d5adfd-714e-473e-80e7-875a7236a0d9 req-b03bc4b8-8fb9-41d5-a4f2-5eeb9cf25413 service nova] Acquiring lock "refresh_cache-3858399e-9fc4-4d60-a9d5-95caefb7bd87" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.489511] env[68285]: DEBUG nova.network.neutron [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Successfully created port: e4eda400-b7bd-4283-bcbd-ff116b289b03 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1088.575774] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891924, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.664248] env[68285]: DEBUG nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1088.671639] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: fe9a8a13-73ec-4556-a62c-cc49fd01f539] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1088.777757] env[68285]: DEBUG nova.network.neutron [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1088.810282] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ee5548-390a-9e35-4301-cdaeef52557a, 'name': SearchDatastore_Task, 'duration_secs': 0.013093} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.811252] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58a9bab7-d819-440d-bf66-44b75750b430 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.821542] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1088.821542] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52173888-b474-8aaf-b845-5e7574639bac" [ 1088.821542] env[68285]: _type = "Task" [ 1088.821542] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.837530] env[68285]: DEBUG oslo_vmware.api [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Task: {'id': task-2891926, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253044} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.840506] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1088.840701] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1088.840877] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1088.841057] env[68285]: INFO nova.compute.manager [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1088.841300] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1088.843846] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52173888-b474-8aaf-b845-5e7574639bac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.846306] env[68285]: DEBUG nova.compute.manager [-] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1088.846411] env[68285]: DEBUG nova.network.neutron [-] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1088.872946] env[68285]: DEBUG nova.virt.hardware [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1088.873209] env[68285]: DEBUG nova.virt.hardware [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1088.873366] env[68285]: DEBUG nova.virt.hardware [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1088.873549] env[68285]: DEBUG nova.virt.hardware [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1088.873695] env[68285]: DEBUG nova.virt.hardware [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1088.873928] env[68285]: DEBUG nova.virt.hardware [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1088.874062] env[68285]: DEBUG nova.virt.hardware [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1088.874231] env[68285]: DEBUG nova.virt.hardware [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1088.874394] env[68285]: DEBUG nova.virt.hardware [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1088.874558] env[68285]: DEBUG nova.virt.hardware [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1088.874730] env[68285]: DEBUG nova.virt.hardware [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1088.876033] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a128c620-de5b-4d0f-81eb-df23c220558a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.884682] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead9bc8c-fe39-499c-8d6d-003d459716f5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.901403] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1088.907196] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1088.912978] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1088.913429] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbc84402-1da4-4384-8577-ffcaa15791af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.933255] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1088.933255] env[68285]: value = "task-2891927" [ 1088.933255] env[68285]: _type = "Task" [ 1088.933255] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.946723] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891927, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.960654] env[68285]: DEBUG nova.network.neutron [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Updating instance_info_cache with network_info: [{"id": "b1736f52-bada-4b08-820b-ac312cd00b5b", "address": "fa:16:3e:65:04:21", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1736f52-ba", "ovs_interfaceid": "b1736f52-bada-4b08-820b-ac312cd00b5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.075052] env[68285]: DEBUG oslo_vmware.api [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891924, 'name': PowerOnVM_Task, 'duration_secs': 0.690202} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.075052] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1089.075052] env[68285]: INFO nova.compute.manager [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Took 9.03 seconds to spawn the instance on the hypervisor. 
[ 1089.075262] env[68285]: DEBUG nova.compute.manager [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1089.075892] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-452adda1-6fd3-41d9-9e20-1121d5fb97aa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.085152] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f747f2-f9fc-4dbb-8159-fdbecc77d757 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.092284] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092f78dd-3896-4c21-8414-a797a0cd4833 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.140157] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26325612-b03b-488b-8050-6b5a51fa3dc8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.149556] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85991fb-dcc3-4a32-8a1b-412170e74cb1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.166280] env[68285]: DEBUG nova.compute.provider_tree [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.176647] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 52fbfbe4-1807-4d6d-9139-ebe30e6bf647] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1089.337976] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52173888-b474-8aaf-b845-5e7574639bac, 'name': SearchDatastore_Task, 'duration_secs': 0.023189} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.337976] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.338453] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 9c190abd-23ee-4e8e-8b91-9050847581d5/9c190abd-23ee-4e8e-8b91-9050847581d5.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1089.338532] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36b09c55-97a1-4294-b91a-7da5414c8b31 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.345302] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1089.345302] env[68285]: value = "task-2891928" [ 1089.345302] env[68285]: _type = "Task" [ 1089.345302] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.353476] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891928, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.411646] env[68285]: DEBUG nova.compute.manager [req-c9448217-8d4c-4304-9eb8-3fb7c91c0352 req-bd69c1fd-436d-4df5-8a98-6d4b0daf645c service nova] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Received event network-vif-deleted-9aa7f869-5c69-4616-a1c6-b2957527dd68 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1089.411844] env[68285]: INFO nova.compute.manager [req-c9448217-8d4c-4304-9eb8-3fb7c91c0352 req-bd69c1fd-436d-4df5-8a98-6d4b0daf645c service nova] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Neutron deleted interface 9aa7f869-5c69-4616-a1c6-b2957527dd68; detaching it from the instance and deleting it from the info cache [ 1089.411948] env[68285]: DEBUG nova.network.neutron [req-c9448217-8d4c-4304-9eb8-3fb7c91c0352 req-bd69c1fd-436d-4df5-8a98-6d4b0daf645c service nova] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.443834] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891927, 'name': CreateVM_Task, 'duration_secs': 0.28303} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.444102] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1089.444470] env[68285]: DEBUG oslo_concurrency.lockutils [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.444629] env[68285]: DEBUG oslo_concurrency.lockutils [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.444932] env[68285]: DEBUG oslo_concurrency.lockutils [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1089.445451] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b3194de-a010-47ab-8eee-554e5d004993 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.450315] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1089.450315] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529a7f68-ab42-1d9e-d340-74fd0754bce3" [ 1089.450315] env[68285]: _type = "Task" [ 1089.450315] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.460470] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529a7f68-ab42-1d9e-d340-74fd0754bce3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.463975] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "refresh_cache-3858399e-9fc4-4d60-a9d5-95caefb7bd87" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.464350] env[68285]: DEBUG nova.compute.manager [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Instance network_info: |[{"id": "b1736f52-bada-4b08-820b-ac312cd00b5b", "address": "fa:16:3e:65:04:21", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1736f52-ba", "ovs_interfaceid": "b1736f52-bada-4b08-820b-ac312cd00b5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1089.464568] env[68285]: DEBUG oslo_concurrency.lockutils [req-05d5adfd-714e-473e-80e7-875a7236a0d9 req-b03bc4b8-8fb9-41d5-a4f2-5eeb9cf25413 service nova] Acquired lock "refresh_cache-3858399e-9fc4-4d60-a9d5-95caefb7bd87" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.464758] env[68285]: DEBUG nova.network.neutron [req-05d5adfd-714e-473e-80e7-875a7236a0d9 req-b03bc4b8-8fb9-41d5-a4f2-5eeb9cf25413 service nova] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Refreshing network info cache for port b1736f52-bada-4b08-820b-ac312cd00b5b {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1089.467049] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:04:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1736f52-bada-4b08-820b-ac312cd00b5b', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1089.474229] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Creating folder: Project 
(288595d9298e43fa859bc6b68054aa08). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1089.475123] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24cbfe4c-8202-4778-83b1-08176e38c6d7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.485362] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Created folder: Project (288595d9298e43fa859bc6b68054aa08) in parent group-v580775. [ 1089.485546] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Creating folder: Instances. Parent ref: group-v580986. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1089.485817] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3264556d-4753-4f6a-9998-53ac28ac776b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.495054] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Created folder: Instances in parent group-v580986. [ 1089.495288] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1089.495468] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1089.495653] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-19ea154c-179c-4740-86dd-6b1c1e6bdf38 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.516244] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1089.516244] env[68285]: value = "task-2891931" [ 1089.516244] env[68285]: _type = "Task" [ 1089.516244] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.523831] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891931, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.596387] env[68285]: INFO nova.compute.manager [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Took 58.09 seconds to build instance. 
[ 1089.670514] env[68285]: DEBUG nova.scheduler.client.report [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.677035] env[68285]: DEBUG nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1089.681507] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: f26a5b02-c71f-4f04-a8b2-4e284a6e37a6] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1089.710216] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1089.710651] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1089.710830] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1089.711070] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1089.712860] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 
tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1089.712860] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1089.712860] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1089.712860] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1089.712860] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1089.712860] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1089.712860] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1089.714117] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8cb889-6394-4322-acd4-0694a531857a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.724983] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a623c9bd-3619-486a-bde5-895578819979 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.768255] env[68285]: DEBUG nova.network.neutron [-] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.858223] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891928, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.914779] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-478212eb-71db-44c9-a550-fe14b9e8eb5b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.925135] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ddfac59-22c4-4042-b606-8c5080870ecf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.963910] env[68285]: DEBUG nova.compute.manager [req-c9448217-8d4c-4304-9eb8-3fb7c91c0352 req-bd69c1fd-436d-4df5-8a98-6d4b0daf645c service nova] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Detach interface failed, port_id=9aa7f869-5c69-4616-a1c6-b2957527dd68, reason: Instance 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1089.975247] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529a7f68-ab42-1d9e-d340-74fd0754bce3, 'name': SearchDatastore_Task, 'duration_secs': 0.012147} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.975544] env[68285]: DEBUG oslo_concurrency.lockutils [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.976400] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1089.976400] env[68285]: DEBUG oslo_concurrency.lockutils [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.976400] env[68285]: DEBUG oslo_concurrency.lockutils [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.976400] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.976841] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d3fb627-d1c3-4921-b058-74140919dae8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.985631] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.985879] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1089.986549] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0714ca5f-4eb1-4bf4-8233-339508eb54c7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.991738] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1089.991738] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5280c393-8301-c4c9-3586-f3858bce8f56" [ 1089.991738] env[68285]: _type = "Task" [ 1089.991738] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.999439] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5280c393-8301-c4c9-3586-f3858bce8f56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.002072] env[68285]: DEBUG nova.network.neutron [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Successfully updated port: e4eda400-b7bd-4283-bcbd-ff116b289b03 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1090.025750] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891931, 'name': CreateVM_Task, 'duration_secs': 0.484238} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.025935] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1090.026610] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.027722] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.027722] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1090.027722] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-921146cb-e626-47a8-bccd-aed977ec2750 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.032265] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1090.032265] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c2d693-f589-cfd4-414a-06e1739885bd" [ 1090.032265] env[68285]: _type = "Task" [ 1090.032265] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.040639] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c2d693-f589-cfd4-414a-06e1739885bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.098420] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bfbdbfa8-4cbe-42dd-88d9-f039233f3101 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "d0b04097-292a-47e7-8f14-199b1650dc2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.601s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.180869] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.522s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.181398] env[68285]: DEBUG nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1090.188577] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 33.877s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.190061] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 682c3b6e-a605-486a-86c8-af173d80cbcf] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1090.270288] env[68285]: INFO nova.compute.manager [-] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Took 1.42 seconds to deallocate network for instance. [ 1090.362066] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891928, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572911} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.362342] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 9c190abd-23ee-4e8e-8b91-9050847581d5/9c190abd-23ee-4e8e-8b91-9050847581d5.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1090.362539] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1090.362791] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f14c5ee1-3e93-4740-961a-0d1e5f5eaa99 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.369654] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1090.369654] env[68285]: value = "task-2891932" [ 1090.369654] env[68285]: _type = "Task" [ 1090.369654] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.377694] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891932, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.502672] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5280c393-8301-c4c9-3586-f3858bce8f56, 'name': SearchDatastore_Task, 'duration_secs': 0.014432} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.503521] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81a2c3e0-20e5-4e64-825e-524dc345c761 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.508411] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1090.508411] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52905799-eed3-d17b-1ea7-a42f2ad8003f" [ 1090.508411] env[68285]: _type = "Task" [ 1090.508411] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.508963] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "refresh_cache-cbf2a387-8a5a-4400-833b-e04e23ca42f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.509108] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired lock "refresh_cache-cbf2a387-8a5a-4400-833b-e04e23ca42f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.509307] env[68285]: DEBUG nova.network.neutron [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1090.524261] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52905799-eed3-d17b-1ea7-a42f2ad8003f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.544822] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c2d693-f589-cfd4-414a-06e1739885bd, 'name': SearchDatastore_Task, 'duration_secs': 0.01108} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.545963] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.545963] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1090.545963] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.558164] env[68285]: DEBUG nova.network.neutron [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1090.576536] env[68285]: DEBUG nova.network.neutron [req-05d5adfd-714e-473e-80e7-875a7236a0d9 req-b03bc4b8-8fb9-41d5-a4f2-5eeb9cf25413 service nova] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Updated VIF entry in instance network info cache for port b1736f52-bada-4b08-820b-ac312cd00b5b. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1090.576875] env[68285]: DEBUG nova.network.neutron [req-05d5adfd-714e-473e-80e7-875a7236a0d9 req-b03bc4b8-8fb9-41d5-a4f2-5eeb9cf25413 service nova] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Updating instance_info_cache with network_info: [{"id": "b1736f52-bada-4b08-820b-ac312cd00b5b", "address": "fa:16:3e:65:04:21", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1736f52-ba", "ovs_interfaceid": "b1736f52-bada-4b08-820b-ac312cd00b5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.601231] env[68285]: DEBUG nova.compute.manager [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1090.632479] env[68285]: DEBUG nova.compute.manager [req-f5028683-c29f-43eb-8c4f-21c559b33509 req-930de648-8a68-4021-8197-28e88e282818 service nova] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Received event network-vif-plugged-e4eda400-b7bd-4283-bcbd-ff116b289b03 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1090.632714] env[68285]: DEBUG oslo_concurrency.lockutils [req-f5028683-c29f-43eb-8c4f-21c559b33509 req-930de648-8a68-4021-8197-28e88e282818 service nova] Acquiring lock "cbf2a387-8a5a-4400-833b-e04e23ca42f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.632904] env[68285]: DEBUG oslo_concurrency.lockutils [req-f5028683-c29f-43eb-8c4f-21c559b33509 req-930de648-8a68-4021-8197-28e88e282818 service nova] Lock "cbf2a387-8a5a-4400-833b-e04e23ca42f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.633135] env[68285]: DEBUG oslo_concurrency.lockutils [req-f5028683-c29f-43eb-8c4f-21c559b33509 req-930de648-8a68-4021-8197-28e88e282818 service nova] Lock "cbf2a387-8a5a-4400-833b-e04e23ca42f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.633319] env[68285]: DEBUG nova.compute.manager [req-f5028683-c29f-43eb-8c4f-21c559b33509 req-930de648-8a68-4021-8197-28e88e282818 service nova] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] No waiting events found dispatching network-vif-plugged-e4eda400-b7bd-4283-bcbd-ff116b289b03 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1090.633484] env[68285]: WARNING nova.compute.manager [req-f5028683-c29f-43eb-8c4f-21c559b33509 req-930de648-8a68-4021-8197-28e88e282818 service nova] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Received unexpected event network-vif-plugged-e4eda400-b7bd-4283-bcbd-ff116b289b03 for instance with vm_state building and task_state spawning. [ 1090.633642] env[68285]: DEBUG nova.compute.manager [req-f5028683-c29f-43eb-8c4f-21c559b33509 req-930de648-8a68-4021-8197-28e88e282818 service nova] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Received event network-changed-e4eda400-b7bd-4283-bcbd-ff116b289b03 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1090.633791] env[68285]: DEBUG nova.compute.manager [req-f5028683-c29f-43eb-8c4f-21c559b33509 req-930de648-8a68-4021-8197-28e88e282818 service nova] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Refreshing instance network info cache due to event network-changed-e4eda400-b7bd-4283-bcbd-ff116b289b03. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1090.633953] env[68285]: DEBUG oslo_concurrency.lockutils [req-f5028683-c29f-43eb-8c4f-21c559b33509 req-930de648-8a68-4021-8197-28e88e282818 service nova] Acquiring lock "refresh_cache-cbf2a387-8a5a-4400-833b-e04e23ca42f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.688461] env[68285]: DEBUG nova.network.neutron [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Updating instance_info_cache with network_info: [{"id": "e4eda400-b7bd-4283-bcbd-ff116b289b03", "address": "fa:16:3e:5f:4a:40", "network": {"id": "231b12a1-4634-4ad8-914a-4a74de128049", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-548125967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b390e5b4080a4984a3f935e9e6a0dd2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4eda400-b7", "ovs_interfaceid": "e4eda400-b7bd-4283-bcbd-ff116b289b03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.694275] env[68285]: INFO nova.compute.claims [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1090.698339] env[68285]: DEBUG nova.compute.utils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1090.699499] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 9b34b7bf-c0f4-488d-bb38-61c0a5cabbfa] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1090.701641] env[68285]: DEBUG nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1090.701802] env[68285]: DEBUG nova.network.neutron [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1090.738844] env[68285]: DEBUG nova.policy [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '48401b0b09a2477db2a87df4835c70a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b390e5b4080a4984a3f935e9e6a0dd2a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1090.777530] env[68285]: DEBUG oslo_concurrency.lockutils [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.858303] env[68285]: DEBUG nova.compute.manager [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1090.859297] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94133ab7-9125-4c59-9c32-8ce823103a28 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.878372] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891932, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065131} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.878620] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1090.879390] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5479ef7c-a93e-4bcb-9a2f-b2f9044f2048 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.900571] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 9c190abd-23ee-4e8e-8b91-9050847581d5/9c190abd-23ee-4e8e-8b91-9050847581d5.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1090.901630] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e56dd3ad-f4c3-4eef-836f-6515f206670e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.924381] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1090.924381] env[68285]: value = "task-2891933" [ 1090.924381] env[68285]: _type = "Task" [ 1090.924381] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.932395] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891933, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.970321] env[68285]: DEBUG nova.network.neutron [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Successfully created port: ea8eb01e-5384-4256-a1e0-75cf523ec6b9 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1091.018854] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52905799-eed3-d17b-1ea7-a42f2ad8003f, 'name': SearchDatastore_Task, 'duration_secs': 0.050623} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.019172] env[68285]: DEBUG oslo_concurrency.lockutils [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.020087] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] bb806297-47c6-45b7-a177-f3300fa1e29a/bb806297-47c6-45b7-a177-f3300fa1e29a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1091.020087] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.020087] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1091.020314] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b6c5788-eb57-4266-9ebd-35c1c9389033 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.022060] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a0ef1c2-2714-45e3-a300-ce96cba08914 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.029539] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1091.029539] env[68285]: value = "task-2891934" [ 1091.029539] env[68285]: _type = "Task" [ 1091.029539] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.033772] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1091.033972] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1091.034986] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31f8c777-08ad-41a4-8784-867169b32f1e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.042567] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891934, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.045605] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1091.045605] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ef598e-24b0-53a9-533d-884d21b97c18" [ 1091.045605] env[68285]: _type = "Task" [ 1091.045605] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.053305] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ef598e-24b0-53a9-533d-884d21b97c18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.079276] env[68285]: DEBUG oslo_concurrency.lockutils [req-05d5adfd-714e-473e-80e7-875a7236a0d9 req-b03bc4b8-8fb9-41d5-a4f2-5eeb9cf25413 service nova] Releasing lock "refresh_cache-3858399e-9fc4-4d60-a9d5-95caefb7bd87" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.132624] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.190587] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Releasing lock "refresh_cache-cbf2a387-8a5a-4400-833b-e04e23ca42f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.190977] env[68285]: DEBUG nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Instance network_info: |[{"id": "e4eda400-b7bd-4283-bcbd-ff116b289b03", "address": "fa:16:3e:5f:4a:40", "network": {"id": "231b12a1-4634-4ad8-914a-4a74de128049", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-548125967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 
4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b390e5b4080a4984a3f935e9e6a0dd2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4eda400-b7", "ovs_interfaceid": "e4eda400-b7bd-4283-bcbd-ff116b289b03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1091.191313] env[68285]: DEBUG oslo_concurrency.lockutils [req-f5028683-c29f-43eb-8c4f-21c559b33509 req-930de648-8a68-4021-8197-28e88e282818 service nova] Acquired lock "refresh_cache-cbf2a387-8a5a-4400-833b-e04e23ca42f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.191494] env[68285]: DEBUG nova.network.neutron [req-f5028683-c29f-43eb-8c4f-21c559b33509 req-930de648-8a68-4021-8197-28e88e282818 service nova] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Refreshing network info cache for port e4eda400-b7bd-4283-bcbd-ff116b289b03 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1091.192748] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:4a:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f016d1-34a6-4ebd-81ed-a6bf9d109b87', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4eda400-b7bd-4283-bcbd-ff116b289b03', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1091.205873] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1091.207532] env[68285]: INFO nova.compute.resource_tracker [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating resource usage from migration cfcece04-fb70-4f24-94cf-cf4bae5a06fb [ 1091.210729] env[68285]: DEBUG nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1091.213424] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 105f0ad6-1591-40b9-997c-280860bd6501] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1091.215578] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1091.216921] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b434a0b4-0a53-43a3-afaa-b0fe8253b28b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.242360] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1091.242360] env[68285]: value = "task-2891935" [ 1091.242360] env[68285]: _type = "Task" [ 1091.242360] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.251537] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891935, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.371052] env[68285]: INFO nova.compute.manager [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] instance snapshotting [ 1091.373337] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22aa946b-8d17-4905-bfa1-5cc391714ecc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.397910] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a17d4d-d88a-4df6-be44-e694e418ace1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.435996] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891933, 'name': ReconfigVM_Task, 'duration_secs': 0.31232} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.435996] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 9c190abd-23ee-4e8e-8b91-9050847581d5/9c190abd-23ee-4e8e-8b91-9050847581d5.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1091.438957] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b3b9b06-7b8d-408e-8ed8-a1dd4b30fdb2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.446634] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1091.446634] env[68285]: value = "task-2891936" [ 1091.446634] env[68285]: _type = "Task" [ 1091.446634] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.457268] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891936, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.539354] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891934, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.555648] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ef598e-24b0-53a9-533d-884d21b97c18, 'name': SearchDatastore_Task, 'duration_secs': 0.013493} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.560036] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db17c1be-581f-4660-bbbb-a544bccde9c4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.565865] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1091.565865] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524e9c4c-fdd3-bae0-dc69-988845a84871" [ 1091.565865] env[68285]: _type = "Task" [ 1091.565865] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.576977] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524e9c4c-fdd3-bae0-dc69-988845a84871, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.602198] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b607decc-904e-43ca-aa09-76c4a8d619c7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.612154] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f39e43-409e-4261-a3e0-0bd2ae139795 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.661300] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21e019d-5524-42e9-8b1d-f6dbd280a2f8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.673696] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d45083b-25ea-466a-9fb9-5a4ee0f9cba1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.694377] env[68285]: DEBUG nova.compute.provider_tree [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1091.716222] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: ef0636f4-3149-44e8-a4a3-62b9ede5dc28] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1091.753222] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891935, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.912054] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1091.912371] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f735b89a-381e-49ac-a475-88c80ebfc5dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.919996] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1091.919996] env[68285]: value = "task-2891937" [ 1091.919996] env[68285]: _type = "Task" [ 1091.919996] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.931170] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891937, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.956243] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891936, 'name': Rename_Task, 'duration_secs': 0.258183} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.956552] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1091.956810] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f026add-47c4-4ee4-9625-91aee79264ea {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.963711] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1091.963711] env[68285]: value = "task-2891938" [ 1091.963711] env[68285]: _type = "Task" [ 1091.963711] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.971559] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891938, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.042585] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891934, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.746683} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.042860] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] bb806297-47c6-45b7-a177-f3300fa1e29a/bb806297-47c6-45b7-a177-f3300fa1e29a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1092.044141] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1092.044435] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b8daebe-9a03-4910-ab9f-7c04e95706ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.048035] env[68285]: DEBUG nova.network.neutron [req-f5028683-c29f-43eb-8c4f-21c559b33509 req-930de648-8a68-4021-8197-28e88e282818 service nova] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Updated VIF entry in instance network info cache for port e4eda400-b7bd-4283-bcbd-ff116b289b03. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1092.048035] env[68285]: DEBUG nova.network.neutron [req-f5028683-c29f-43eb-8c4f-21c559b33509 req-930de648-8a68-4021-8197-28e88e282818 service nova] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Updating instance_info_cache with network_info: [{"id": "e4eda400-b7bd-4283-bcbd-ff116b289b03", "address": "fa:16:3e:5f:4a:40", "network": {"id": "231b12a1-4634-4ad8-914a-4a74de128049", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-548125967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b390e5b4080a4984a3f935e9e6a0dd2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4eda400-b7", "ovs_interfaceid": "e4eda400-b7bd-4283-bcbd-ff116b289b03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.053617] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1092.053617] env[68285]: value = "task-2891939" [ 1092.053617] env[68285]: _type = "Task" [ 1092.053617] env[68285]: } to 
complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.063385] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891939, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.076079] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524e9c4c-fdd3-bae0-dc69-988845a84871, 'name': SearchDatastore_Task, 'duration_secs': 0.056289} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.076380] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.076684] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 3858399e-9fc4-4d60-a9d5-95caefb7bd87/3858399e-9fc4-4d60-a9d5-95caefb7bd87.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1092.076981] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a229ee83-1579-4f74-a5f9-1dbfb2fbc803 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.084473] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1092.084473] env[68285]: value = "task-2891940" [ 1092.084473] env[68285]: _type = "Task" [ 1092.084473] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.093698] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2891940, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.197814] env[68285]: DEBUG nova.scheduler.client.report [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1092.223458] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: e28d0927-17c2-4256-93d4-ef0cc2c9b92a] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1092.226155] env[68285]: DEBUG nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1092.250656] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1092.251756] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1092.251756] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1092.251756] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1092.251756] env[68285]: 
DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1092.251756] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1092.251963] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1092.251963] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1092.252247] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1092.252451] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1092.252639] env[68285]: DEBUG nova.virt.hardware [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1092.253604] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e393ce-0229-4aa4-b603-af06c7f599a2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.261399] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891935, 'name': CreateVM_Task, 'duration_secs': 0.641593} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.261975] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1092.262696] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.262981] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.263209] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1092.263878] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2e79606-bbf5-4d8e-99e4-4cb4e64bfce0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.269662] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917e6c29-f8cc-43c6-9705-eef20c380f23 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.277034] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1092.277034] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52a03a84-3fda-6c3b-2b05-8e5b58082109" [ 1092.277034] env[68285]: _type = "Task" [ 1092.277034] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.294103] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a03a84-3fda-6c3b-2b05-8e5b58082109, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.431205] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891937, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.461172] env[68285]: DEBUG nova.network.neutron [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Successfully updated port: ea8eb01e-5384-4256-a1e0-75cf523ec6b9 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1092.474337] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891938, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.550476] env[68285]: DEBUG oslo_concurrency.lockutils [req-f5028683-c29f-43eb-8c4f-21c559b33509 req-930de648-8a68-4021-8197-28e88e282818 service nova] Releasing lock "refresh_cache-cbf2a387-8a5a-4400-833b-e04e23ca42f7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.564019] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891939, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064068} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.564019] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1092.564410] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5aefb99-bd5a-44d4-883c-926e5d201df0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.585491] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] bb806297-47c6-45b7-a177-f3300fa1e29a/bb806297-47c6-45b7-a177-f3300fa1e29a.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1092.585883] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb035baa-5d7c-4ef4-bc54-e1ded3d29018 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.611250] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2891940, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49359} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.612845] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 3858399e-9fc4-4d60-a9d5-95caefb7bd87/3858399e-9fc4-4d60-a9d5-95caefb7bd87.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1092.613193] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1092.613498] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1092.613498] env[68285]: value = "task-2891941" [ 1092.613498] env[68285]: _type = "Task" [ 1092.613498] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.613727] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-abf0fdc7-b7f3-4d36-99ec-fa4b5acaa10f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.624617] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891941, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.626078] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1092.626078] env[68285]: value = "task-2891942" [ 1092.626078] env[68285]: _type = "Task" [ 1092.626078] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.636666] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2891942, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.702453] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.518s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.702799] env[68285]: INFO nova.compute.manager [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Migrating [ 1092.710193] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.059s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.710424] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.712705] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.735s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.713358] env[68285]: DEBUG nova.objects.instance [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lazy-loading 'resources' on Instance uuid f2696c7f-5676-403a-87e0-fb0884866005 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1092.724763] env[68285]: DEBUG nova.compute.manager [req-3e5dc8d6-9a20-4e74-80a6-3ac1fab39c53 req-8493f9bd-2488-4da5-a67a-322cd1e705b0 service nova] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Received event network-vif-plugged-ea8eb01e-5384-4256-a1e0-75cf523ec6b9 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1092.724967] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e5dc8d6-9a20-4e74-80a6-3ac1fab39c53 req-8493f9bd-2488-4da5-a67a-322cd1e705b0 service nova] Acquiring lock "d025b807-fda4-4aff-beac-0ad6a092fe74-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.725185] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e5dc8d6-9a20-4e74-80a6-3ac1fab39c53 req-8493f9bd-2488-4da5-a67a-322cd1e705b0 service nova] Lock "d025b807-fda4-4aff-beac-0ad6a092fe74-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.725472] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e5dc8d6-9a20-4e74-80a6-3ac1fab39c53 req-8493f9bd-2488-4da5-a67a-322cd1e705b0 service nova] Lock "d025b807-fda4-4aff-beac-0ad6a092fe74-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.725580] env[68285]: DEBUG nova.compute.manager [req-3e5dc8d6-9a20-4e74-80a6-3ac1fab39c53 req-8493f9bd-2488-4da5-a67a-322cd1e705b0 service nova] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] No waiting events found dispatching network-vif-plugged-ea8eb01e-5384-4256-a1e0-75cf523ec6b9 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1092.725806] env[68285]: WARNING nova.compute.manager [req-3e5dc8d6-9a20-4e74-80a6-3ac1fab39c53 req-8493f9bd-2488-4da5-a67a-322cd1e705b0 service nova] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Received unexpected event network-vif-plugged-ea8eb01e-5384-4256-a1e0-75cf523ec6b9 for instance with vm_state building and task_state spawning. [ 1092.725979] env[68285]: DEBUG nova.compute.manager [req-3e5dc8d6-9a20-4e74-80a6-3ac1fab39c53 req-8493f9bd-2488-4da5-a67a-322cd1e705b0 service nova] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Received event network-changed-ea8eb01e-5384-4256-a1e0-75cf523ec6b9 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1092.726205] env[68285]: DEBUG nova.compute.manager [req-3e5dc8d6-9a20-4e74-80a6-3ac1fab39c53 req-8493f9bd-2488-4da5-a67a-322cd1e705b0 service nova] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Refreshing instance network info cache due to event network-changed-ea8eb01e-5384-4256-a1e0-75cf523ec6b9. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1092.726358] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e5dc8d6-9a20-4e74-80a6-3ac1fab39c53 req-8493f9bd-2488-4da5-a67a-322cd1e705b0 service nova] Acquiring lock "refresh_cache-d025b807-fda4-4aff-beac-0ad6a092fe74" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.726500] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e5dc8d6-9a20-4e74-80a6-3ac1fab39c53 req-8493f9bd-2488-4da5-a67a-322cd1e705b0 service nova] Acquired lock "refresh_cache-d025b807-fda4-4aff-beac-0ad6a092fe74" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.726668] env[68285]: DEBUG nova.network.neutron [req-3e5dc8d6-9a20-4e74-80a6-3ac1fab39c53 req-8493f9bd-2488-4da5-a67a-322cd1e705b0 service nova] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Refreshing network info cache for port ea8eb01e-5384-4256-a1e0-75cf523ec6b9 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1092.729389] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.729567] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Cleaning up deleted instances with incomplete migration {{(pid=68285) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1092.743710] env[68285]: INFO nova.scheduler.client.report [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleted allocations for instance 60144efd-061e-4144-9541-b2321c9b0ec1 [ 1092.787375] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a03a84-3fda-6c3b-2b05-8e5b58082109, 'name': SearchDatastore_Task, 'duration_secs': 0.05185} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.787626] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.787902] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1092.788159] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.788317] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.788514] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1092.788768] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74c0c643-82fe-45c6-9dcb-ea78dddefe65 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.797045] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1092.797295] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1092.798070] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7bbccd3-22f4-4e92-97ac-a65136695841 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.804022] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1092.804022] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523e4f97-71c7-e38b-37f5-3b4f98af83ce" [ 1092.804022] env[68285]: _type = "Task" [ 1092.804022] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.813569] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523e4f97-71c7-e38b-37f5-3b4f98af83ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.929950] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891937, 'name': CreateSnapshot_Task, 'duration_secs': 0.966919} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.930975] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1092.931160] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f5f34c-9d0c-4af0-8fe1-9131f4a69408 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.964373] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "refresh_cache-d025b807-fda4-4aff-beac-0ad6a092fe74" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.974446] env[68285]: DEBUG oslo_vmware.api [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2891938, 'name': PowerOnVM_Task, 'duration_secs': 0.590328} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.974712] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1092.974907] env[68285]: INFO nova.compute.manager [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Took 8.35 seconds to spawn the instance on the hypervisor. [ 1092.975118] env[68285]: DEBUG nova.compute.manager [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1092.975879] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e89df8d-2c8e-4651-8882-915f12f2041f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.125932] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891941, 'name': ReconfigVM_Task, 'duration_secs': 0.321013} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.126566] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Reconfigured VM instance instance-00000047 to attach disk [datastore2] bb806297-47c6-45b7-a177-f3300fa1e29a/bb806297-47c6-45b7-a177-f3300fa1e29a.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1093.126915] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3dfd5c26-6627-4e86-8019-82e99c6e6016 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.136762] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2891942, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140001} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.137915] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1093.138253] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1093.138253] env[68285]: value = "task-2891943" [ 1093.138253] env[68285]: _type = "Task" [ 1093.138253] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.138893] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0fdbb7-52ff-41ce-a7cd-628a9297f5c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.159224] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891943, 'name': Rename_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.168213] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 3858399e-9fc4-4d60-a9d5-95caefb7bd87/3858399e-9fc4-4d60-a9d5-95caefb7bd87.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1093.168507] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afcb1951-281b-4ca5-877d-dcd6510eecd2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.187405] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1093.187405] env[68285]: value = "task-2891944" [ 1093.187405] env[68285]: _type = "Task" [ 1093.187405] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.195630] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2891944, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.222617] env[68285]: DEBUG nova.objects.instance [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lazy-loading 'numa_topology' on Instance uuid f2696c7f-5676-403a-87e0-fb0884866005 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.223987] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.224234] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.224478] env[68285]: DEBUG nova.network.neutron [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1093.231523] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.253154] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9c0a3970-2f01-4eb6-a156-2c7ffc729acd tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "60144efd-061e-4144-9541-b2321c9b0ec1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.611s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.262744] env[68285]: DEBUG nova.network.neutron [req-3e5dc8d6-9a20-4e74-80a6-3ac1fab39c53 req-8493f9bd-2488-4da5-a67a-322cd1e705b0 service nova] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1093.316224] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523e4f97-71c7-e38b-37f5-3b4f98af83ce, 'name': SearchDatastore_Task, 'duration_secs': 0.016536} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.316995] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a70b5667-0451-4fd3-9998-16b2961ba290 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.322569] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1093.322569] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52697235-0c98-1eeb-8756-fd344b8f566d" [ 1093.322569] env[68285]: _type = "Task" [ 1093.322569] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.330191] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52697235-0c98-1eeb-8756-fd344b8f566d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.342409] env[68285]: DEBUG nova.network.neutron [req-3e5dc8d6-9a20-4e74-80a6-3ac1fab39c53 req-8493f9bd-2488-4da5-a67a-322cd1e705b0 service nova] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.449435] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1093.449749] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8eb59719-077d-4898-8cd6-6cf309ba419b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.458721] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1093.458721] env[68285]: value = "task-2891945" [ 1093.458721] env[68285]: _type = "Task" [ 1093.458721] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.466187] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891945, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.493507] env[68285]: INFO nova.compute.manager [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Took 58.67 seconds to build instance. 
[ 1093.654495] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891943, 'name': Rename_Task, 'duration_secs': 0.154275} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.654888] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1093.655122] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0557993e-4551-4885-b0f8-4365c79815cf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.662123] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1093.662123] env[68285]: value = "task-2891946" [ 1093.662123] env[68285]: _type = "Task" [ 1093.662123] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.675756] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891946, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.697945] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2891944, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.721878] env[68285]: DEBUG nova.compute.manager [req-81b94011-8a49-4cf6-a429-fdb8257b2f19 req-58d46eed-ab20-4e81-93e7-0caa94230ada service nova] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Received event network-changed-a9d34554-5a11-451d-b371-8a0cdfc63de6 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1093.722207] env[68285]: DEBUG nova.compute.manager [req-81b94011-8a49-4cf6-a429-fdb8257b2f19 req-58d46eed-ab20-4e81-93e7-0caa94230ada service nova] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Refreshing instance network info cache due to event network-changed-a9d34554-5a11-451d-b371-8a0cdfc63de6. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1093.722366] env[68285]: DEBUG oslo_concurrency.lockutils [req-81b94011-8a49-4cf6-a429-fdb8257b2f19 req-58d46eed-ab20-4e81-93e7-0caa94230ada service nova] Acquiring lock "refresh_cache-9c190abd-23ee-4e8e-8b91-9050847581d5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.722611] env[68285]: DEBUG oslo_concurrency.lockutils [req-81b94011-8a49-4cf6-a429-fdb8257b2f19 req-58d46eed-ab20-4e81-93e7-0caa94230ada service nova] Acquired lock "refresh_cache-9c190abd-23ee-4e8e-8b91-9050847581d5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.723312] env[68285]: DEBUG nova.network.neutron [req-81b94011-8a49-4cf6-a429-fdb8257b2f19 req-58d46eed-ab20-4e81-93e7-0caa94230ada service nova] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Refreshing network info cache for port a9d34554-5a11-451d-b371-8a0cdfc63de6 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1093.727033] env[68285]: DEBUG nova.objects.base [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1093.843914] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52697235-0c98-1eeb-8756-fd344b8f566d, 'name': SearchDatastore_Task, 'duration_secs': 0.039683} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.844212] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.844465] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] cbf2a387-8a5a-4400-833b-e04e23ca42f7/cbf2a387-8a5a-4400-833b-e04e23ca42f7.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1093.845024] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e5dc8d6-9a20-4e74-80a6-3ac1fab39c53 req-8493f9bd-2488-4da5-a67a-322cd1e705b0 service nova] Releasing lock "refresh_cache-d025b807-fda4-4aff-beac-0ad6a092fe74" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.845764] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77043fc5-8c56-407e-870a-134b3b940f6b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.847680] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired lock "refresh_cache-d025b807-fda4-4aff-beac-0ad6a092fe74" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.847838] env[68285]: DEBUG nova.network.neutron [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1093.857104] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1093.857104] env[68285]: value = "task-2891947" [ 1093.857104] env[68285]: _type = "Task" [ 1093.857104] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.869946] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891947, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.956225] env[68285]: DEBUG nova.network.neutron [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating instance_info_cache with network_info: [{"id": "025d1a2b-ae65-4a5c-a90f-66fabc72e11c", "address": "fa:16:3e:e1:f7:d0", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025d1a2b-ae", "ovs_interfaceid": "025d1a2b-ae65-4a5c-a90f-66fabc72e11c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.973526] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891945, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.995347] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b35bfabd-a5b9-4d2e-93a7-a00911de8a7e tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "9c190abd-23ee-4e8e-8b91-9050847581d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.184s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.108588] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77141e9c-d957-401c-b8f2-6fb38edd8d3b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.116511] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4030560-d0be-4ac0-b472-53d4667b3a0d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.153019] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52d2138-649e-40cf-a3b5-ec29e2f5a5d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.158425] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdfa1b1-4e23-4ccb-b75c-1b1d0234a2fe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.175873] env[68285]: DEBUG nova.compute.provider_tree [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.180513] env[68285]: DEBUG oslo_vmware.api [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891946, 'name': PowerOnVM_Task, 'duration_secs': 0.469311} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.180780] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1094.181018] env[68285]: DEBUG nova.compute.manager [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1094.181898] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82a1e1d-3b2e-4989-be44-779c3fda2d22 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.198783] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2891944, 'name': ReconfigVM_Task, 'duration_secs': 0.906133} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.199125] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 3858399e-9fc4-4d60-a9d5-95caefb7bd87/3858399e-9fc4-4d60-a9d5-95caefb7bd87.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1094.199805] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-879628fe-0905-4349-bb48-a930137dac54 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.206524] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1094.206524] env[68285]: value = "task-2891948" [ 1094.206524] env[68285]: _type = "Task" [ 1094.206524] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.214775] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2891948, 'name': Rename_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.339089] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "ec89a2a4-3bfc-45c5-b7f2-239b52995d6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.339464] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "ec89a2a4-3bfc-45c5-b7f2-239b52995d6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.339693] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "ec89a2a4-3bfc-45c5-b7f2-239b52995d6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.339884] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "ec89a2a4-3bfc-45c5-b7f2-239b52995d6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.340069] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "ec89a2a4-3bfc-45c5-b7f2-239b52995d6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.342356] env[68285]: INFO nova.compute.manager [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Terminating instance [ 1094.374643] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891947, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.384298] env[68285]: DEBUG nova.network.neutron [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1094.462025] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.474230] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891945, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.485122] env[68285]: DEBUG nova.network.neutron [req-81b94011-8a49-4cf6-a429-fdb8257b2f19 req-58d46eed-ab20-4e81-93e7-0caa94230ada service nova] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Updated VIF entry in instance network info cache for port a9d34554-5a11-451d-b371-8a0cdfc63de6. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1094.485579] env[68285]: DEBUG nova.network.neutron [req-81b94011-8a49-4cf6-a429-fdb8257b2f19 req-58d46eed-ab20-4e81-93e7-0caa94230ada service nova] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Updating instance_info_cache with network_info: [{"id": "a9d34554-5a11-451d-b371-8a0cdfc63de6", "address": "fa:16:3e:d1:af:88", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9d34554-5a", "ovs_interfaceid": "a9d34554-5a11-451d-b371-8a0cdfc63de6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.574973] env[68285]: DEBUG nova.network.neutron [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Updating instance_info_cache with network_info: [{"id": "ea8eb01e-5384-4256-a1e0-75cf523ec6b9", "address": "fa:16:3e:d7:d9:e5", "network": {"id": "231b12a1-4634-4ad8-914a-4a74de128049", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-548125967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b390e5b4080a4984a3f935e9e6a0dd2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea8eb01e-53", "ovs_interfaceid": "ea8eb01e-5384-4256-a1e0-75cf523ec6b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.682524] env[68285]: DEBUG nova.scheduler.client.report [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1094.696321] env[68285]: DEBUG oslo_concurrency.lockutils [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.717283] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2891948, 'name': Rename_Task, 'duration_secs': 0.260656} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.717698] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1094.717934] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eeb6b5d5-ae5c-4ae6-8adb-317f849957ec {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.724126] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1094.724126] env[68285]: value = "task-2891949" [ 1094.724126] env[68285]: _type = "Task" [ 1094.724126] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.731400] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2891949, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.848950] env[68285]: DEBUG nova.compute.manager [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1094.849211] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1094.850187] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b730020-544a-4f5d-9eb7-20a53f774c6b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.859554] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1094.859849] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e16afde-7456-49d9-9b84-89e3d2fd40e9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.869563] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891947, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.736139} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.870452] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] cbf2a387-8a5a-4400-833b-e04e23ca42f7/cbf2a387-8a5a-4400-833b-e04e23ca42f7.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1094.870685] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1094.870972] env[68285]: DEBUG oslo_vmware.api [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 1094.870972] env[68285]: value = "task-2891950" [ 1094.870972] env[68285]: _type = "Task" [ 1094.870972] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.871166] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d3aefdd-be65-48cb-85fa-19a205e53d23 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.880754] env[68285]: DEBUG oslo_vmware.api [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891950, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.881955] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1094.881955] env[68285]: value = "task-2891951" [ 1094.881955] env[68285]: _type = "Task" [ 1094.881955] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.889407] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891951, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.977422] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891945, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.988356] env[68285]: DEBUG oslo_concurrency.lockutils [req-81b94011-8a49-4cf6-a429-fdb8257b2f19 req-58d46eed-ab20-4e81-93e7-0caa94230ada service nova] Releasing lock "refresh_cache-9c190abd-23ee-4e8e-8b91-9050847581d5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.078729] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Releasing lock "refresh_cache-d025b807-fda4-4aff-beac-0ad6a092fe74" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.079101] env[68285]: DEBUG nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Instance network_info: |[{"id": "ea8eb01e-5384-4256-a1e0-75cf523ec6b9", "address": "fa:16:3e:d7:d9:e5", "network": {"id": "231b12a1-4634-4ad8-914a-4a74de128049", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-548125967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b390e5b4080a4984a3f935e9e6a0dd2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea8eb01e-53", "ovs_interfaceid": "ea8eb01e-5384-4256-a1e0-75cf523ec6b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1095.079577] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:d9:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f016d1-34a6-4ebd-81ed-a6bf9d109b87', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea8eb01e-5384-4256-a1e0-75cf523ec6b9', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1095.087926] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1095.087926] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1095.087926] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e886c0a7-9ee5-48c5-be9d-d6092004376b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.107717] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1095.107717] env[68285]: value = "task-2891952" [ 1095.107717] env[68285]: _type = "Task" [ 1095.107717] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.116837] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891952, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.188260] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.475s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.191276] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.562s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.191859] env[68285]: INFO nova.compute.claims [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1095.236938] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2891949, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.384801] env[68285]: DEBUG oslo_vmware.api [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891950, 'name': PowerOffVM_Task, 'duration_secs': 0.208639} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.388161] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1095.388392] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1095.388715] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e583ceb3-70fe-4ab4-b8b7-f5cc80deab25 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.396354] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891951, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100504} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.396859] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1095.398371] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceae220f-9ada-45b7-8323-7fa9ec9255e8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.424180] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] cbf2a387-8a5a-4400-833b-e04e23ca42f7/cbf2a387-8a5a-4400-833b-e04e23ca42f7.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1095.424656] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdaaf391-d85e-476d-8bac-090295541af0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.445617] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1095.445617] env[68285]: value = "task-2891954" [ 1095.445617] env[68285]: _type = "Task" [ 1095.445617] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.453819] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891954, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.473627] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1095.473627] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1095.473760] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleting the datastore file [datastore1] ec89a2a4-3bfc-45c5-b7f2-239b52995d6b {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1095.474826] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f8993e3-cd1b-4836-9aff-a24e6f9a0472 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.483087] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891945, 'name': CloneVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.489242] env[68285]: DEBUG oslo_vmware.api [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for the task: (returnval){ [ 1095.489242] env[68285]: value = "task-2891955" [ 1095.489242] env[68285]: _type = "Task" [ 1095.489242] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.499909] env[68285]: DEBUG oslo_vmware.api [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891955, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.619758] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891952, 'name': CreateVM_Task, 'duration_secs': 0.447143} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.619913] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1095.620648] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.620911] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.621258] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1095.621530] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31590b59-db0e-46e4-980b-a1f59c274018 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.627188] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1095.627188] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521fe2d0-38a9-7e51-951c-b811c435e18e" [ 1095.627188] env[68285]: _type = "Task" [ 1095.627188] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.635455] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521fe2d0-38a9-7e51-951c-b811c435e18e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.700702] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d4c318f-11ba-4035-847e-b381b400e07a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "f2696c7f-5676-403a-87e0-fb0884866005" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 44.698s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.701538] env[68285]: DEBUG oslo_concurrency.lockutils [None req-09d12e5b-67c2-4ac0-8064-6030536be7b4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "f2696c7f-5676-403a-87e0-fb0884866005" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 24.479s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.702274] env[68285]: DEBUG oslo_concurrency.lockutils [None req-09d12e5b-67c2-4ac0-8064-6030536be7b4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "f2696c7f-5676-403a-87e0-fb0884866005-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.702274] env[68285]: DEBUG oslo_concurrency.lockutils [None req-09d12e5b-67c2-4ac0-8064-6030536be7b4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "f2696c7f-5676-403a-87e0-fb0884866005-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.702274] env[68285]: DEBUG oslo_concurrency.lockutils [None req-09d12e5b-67c2-4ac0-8064-6030536be7b4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "f2696c7f-5676-403a-87e0-fb0884866005-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.705398] env[68285]: INFO nova.compute.manager [None req-09d12e5b-67c2-4ac0-8064-6030536be7b4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Terminating instance [ 1095.736639] env[68285]: DEBUG oslo_vmware.api [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2891949, 'name': PowerOnVM_Task, 'duration_secs': 0.64363} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.737464] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1095.737735] env[68285]: INFO nova.compute.manager [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Took 8.59 seconds to spawn the instance on the hypervisor. [ 1095.737993] env[68285]: DEBUG nova.compute.manager [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1095.738920] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d7dfc8-9477-424a-9a07-599fe9514742 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.955717] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891954, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.972028] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquiring lock "bb806297-47c6-45b7-a177-f3300fa1e29a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.972157] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Lock "bb806297-47c6-45b7-a177-f3300fa1e29a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.973528] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquiring lock "bb806297-47c6-45b7-a177-f3300fa1e29a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.973528] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Lock "bb806297-47c6-45b7-a177-f3300fa1e29a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.973528] 
env[68285]: DEBUG oslo_concurrency.lockutils [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Lock "bb806297-47c6-45b7-a177-f3300fa1e29a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.974311] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891945, 'name': CloneVM_Task, 'duration_secs': 2.05047} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.976532] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Created linked-clone VM from snapshot [ 1095.976996] env[68285]: INFO nova.compute.manager [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Terminating instance [ 1095.978729] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3cd8abc-4054-4af5-b135-96f1a7d98d86 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.982035] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c2f21d-b34f-4b8a-883a-0a6842c237a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.991224] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Uploading image 16df92d7-466a-491d-b247-71c140d9d824 {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1096.010908] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating instance '1f5fe064-0443-4b7f-911a-45d803836eeb' progress to 0 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1096.019913] env[68285]: DEBUG oslo_vmware.api [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891955, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.030225] env[68285]: DEBUG oslo_vmware.rw_handles [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1096.030225] env[68285]: value = "vm-580991" [ 1096.030225] env[68285]: _type = "VirtualMachine" [ 1096.030225] env[68285]: }. 
{{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1096.030873] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3f8a39fc-e1ee-4efa-a889-c1d006b887bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.038037] env[68285]: DEBUG oslo_vmware.rw_handles [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lease: (returnval){ [ 1096.038037] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e636ab-6c3e-9dec-cfb0-d109c6ed71f4" [ 1096.038037] env[68285]: _type = "HttpNfcLease" [ 1096.038037] env[68285]: } obtained for exporting VM: (result){ [ 1096.038037] env[68285]: value = "vm-580991" [ 1096.038037] env[68285]: _type = "VirtualMachine" [ 1096.038037] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1096.038318] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the lease: (returnval){ [ 1096.038318] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e636ab-6c3e-9dec-cfb0-d109c6ed71f4" [ 1096.038318] env[68285]: _type = "HttpNfcLease" [ 1096.038318] env[68285]: } to be ready. {{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1096.044745] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1096.044745] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e636ab-6c3e-9dec-cfb0-d109c6ed71f4" [ 1096.044745] env[68285]: _type = "HttpNfcLease" [ 1096.044745] env[68285]: } is initializing. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1096.138755] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521fe2d0-38a9-7e51-951c-b811c435e18e, 'name': SearchDatastore_Task, 'duration_secs': 0.032981} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.139067] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.139312] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1096.139636] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.139812] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.140031] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1096.140300] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be4f4bca-564e-48f1-ad2d-4775f1e38cc8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.155667] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1096.155847] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1096.156598] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bad1ecf1-2414-4da9-9390-0c5aa15cfdff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.161883] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1096.161883] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52cec9d2-7b39-6d13-825c-428840a9827f" [ 1096.161883] env[68285]: _type = "Task" [ 1096.161883] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.170419] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52cec9d2-7b39-6d13-825c-428840a9827f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.210304] env[68285]: DEBUG nova.compute.manager [None req-09d12e5b-67c2-4ac0-8064-6030536be7b4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1096.210515] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-09d12e5b-67c2-4ac0-8064-6030536be7b4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1096.210824] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04ae7930-78b4-4e56-8bbd-0c7924f26b26 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.220367] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8b9d53-f79d-4e87-b2b7-58d5b5e51c32 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.262836] env[68285]: WARNING nova.virt.vmwareapi.vmops [None req-09d12e5b-67c2-4ac0-8064-6030536be7b4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f2696c7f-5676-403a-87e0-fb0884866005 could not be found. 
[ 1096.263077] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-09d12e5b-67c2-4ac0-8064-6030536be7b4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1096.263267] env[68285]: INFO nova.compute.manager [None req-09d12e5b-67c2-4ac0-8064-6030536be7b4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1096.263497] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-09d12e5b-67c2-4ac0-8064-6030536be7b4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1096.267945] env[68285]: DEBUG nova.compute.manager [-] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1096.268076] env[68285]: DEBUG nova.network.neutron [-] [instance: f2696c7f-5676-403a-87e0-fb0884866005] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1096.270095] env[68285]: INFO nova.compute.manager [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Took 55.35 seconds to build instance. [ 1096.456853] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891954, 'name': ReconfigVM_Task, 'duration_secs': 1.008027} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.457175] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Reconfigured VM instance instance-0000004b to attach disk [datastore2] cbf2a387-8a5a-4400-833b-e04e23ca42f7/cbf2a387-8a5a-4400-833b-e04e23ca42f7.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1096.457839] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c30643b6-2605-4d3f-8b4b-ef4c1a7fc851 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.465412] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1096.465412] env[68285]: value = "task-2891957" [ 1096.465412] env[68285]: _type = "Task" [ 1096.465412] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.472486] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891957, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.487385] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquiring lock "refresh_cache-bb806297-47c6-45b7-a177-f3300fa1e29a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.487605] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquired lock "refresh_cache-bb806297-47c6-45b7-a177-f3300fa1e29a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.487846] env[68285]: DEBUG nova.network.neutron [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1096.499538] env[68285]: DEBUG oslo_vmware.api [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Task: {'id': task-2891955, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.737904} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.500426] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1096.500608] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1096.500785] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1096.500950] env[68285]: INFO nova.compute.manager [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Took 1.65 seconds to destroy the instance on the hypervisor. 
[ 1096.501199] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1096.501389] env[68285]: DEBUG nova.compute.manager [-] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1096.502014] env[68285]: DEBUG nova.network.neutron [-] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1096.519346] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1096.519710] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dcc262a9-22a8-44c0-a10e-37ab9043dd5d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.529560] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1096.529560] env[68285]: value = "task-2891958" [ 1096.529560] env[68285]: _type = "Task" [ 1096.529560] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.538763] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891958, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.549416] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1096.549416] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e636ab-6c3e-9dec-cfb0-d109c6ed71f4" [ 1096.549416] env[68285]: _type = "HttpNfcLease" [ 1096.549416] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1096.549729] env[68285]: DEBUG oslo_vmware.rw_handles [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1096.549729] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e636ab-6c3e-9dec-cfb0-d109c6ed71f4" [ 1096.549729] env[68285]: _type = "HttpNfcLease" [ 1096.549729] env[68285]: }. 
{{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1096.550531] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfee3433-f784-4133-90b0-b53576198546 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.564256] env[68285]: DEBUG oslo_vmware.rw_handles [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522630d4-9776-ee9d-bd78-dc4578df2cd0/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1096.564452] env[68285]: DEBUG oslo_vmware.rw_handles [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522630d4-9776-ee9d-bd78-dc4578df2cd0/disk-0.vmdk for reading. {{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1096.678138] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52cec9d2-7b39-6d13-825c-428840a9827f, 'name': SearchDatastore_Task, 'duration_secs': 0.046035} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.679684] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01a34215-3b88-4713-9659-2c32d735f5b1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.687307] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ac0a372b-6b40-42f3-af80-0d25a2f22996 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.691110] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1096.691110] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5260cac9-428f-4b11-88d6-ea873731f911" [ 1096.691110] env[68285]: _type = "Task" [ 1096.691110] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.701267] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5260cac9-428f-4b11-88d6-ea873731f911, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.772667] env[68285]: DEBUG oslo_concurrency.lockutils [None req-128c9b4c-2c71-41d0-a29c-c9e172660423 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.864s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.814142] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc6de62-c01f-4623-8c3e-41d32ef61a77 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.823881] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7736a37f-e5ba-498c-8854-182c52c38728 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.862964] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d8dbc9-0e97-4ee8-80fa-21e03cfb8dd9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.871638] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f13760d-3e3f-4695-b88f-0e79af94fce4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.891038] env[68285]: DEBUG nova.compute.provider_tree [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1096.960942] env[68285]: DEBUG nova.compute.manager [req-f72a6f9d-3c3b-4df2-9c62-2a53196340b8 req-5b77ebfa-86a6-41df-889b-30d69c9cc5b8 service nova] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Received event network-vif-deleted-3c317408-dd23-42c9-a837-c59782c5654a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1096.961272] env[68285]: INFO nova.compute.manager [req-f72a6f9d-3c3b-4df2-9c62-2a53196340b8 req-5b77ebfa-86a6-41df-889b-30d69c9cc5b8 service nova] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Neutron deleted interface 3c317408-dd23-42c9-a837-c59782c5654a; detaching it from the instance and deleting it from the info cache [ 1096.961525] env[68285]: DEBUG nova.network.neutron [req-f72a6f9d-3c3b-4df2-9c62-2a53196340b8 req-5b77ebfa-86a6-41df-889b-30d69c9cc5b8 service nova] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.975909] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891957, 'name': Rename_Task, 'duration_secs': 0.185022} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.976866] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1096.977179] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57e2afdb-f1e4-4df7-806e-65585f975130 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.984194] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1096.984194] env[68285]: value = "task-2891959" [ 1096.984194] env[68285]: _type = "Task" [ 1096.984194] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.997155] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891959, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.017272] env[68285]: DEBUG nova.network.neutron [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1097.042819] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891958, 'name': PowerOffVM_Task, 'duration_secs': 0.204432} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.043145] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1097.043392] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating instance '1f5fe064-0443-4b7f-911a-45d803836eeb' progress to 17 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1097.080805] env[68285]: DEBUG nova.network.neutron [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.143107] env[68285]: DEBUG nova.network.neutron [-] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.201078] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5260cac9-428f-4b11-88d6-ea873731f911, 'name': SearchDatastore_Task, 'duration_secs': 0.018144} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.201681] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.202880] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] d025b807-fda4-4aff-beac-0ad6a092fe74/d025b807-fda4-4aff-beac-0ad6a092fe74.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1097.202880] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d27fdb01-9be6-4486-a4f4-68c257a680e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.209955] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1097.209955] env[68285]: value = "task-2891960" [ 1097.209955] env[68285]: _type = "Task" [ 1097.209955] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.219214] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891960, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.367510] env[68285]: DEBUG nova.network.neutron [-] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.395961] env[68285]: DEBUG nova.scheduler.client.report [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1097.464797] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd13f978-b204-48be-b3da-42e5a2653a2e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.475616] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5dd1a33-57cb-413a-9d5b-c4a6abb75842 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.498617] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891959, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.521596] env[68285]: DEBUG nova.compute.manager [req-f72a6f9d-3c3b-4df2-9c62-2a53196340b8 req-5b77ebfa-86a6-41df-889b-30d69c9cc5b8 service nova] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Detach interface failed, port_id=3c317408-dd23-42c9-a837-c59782c5654a, reason: Instance ec89a2a4-3bfc-45c5-b7f2-239b52995d6b could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1097.552959] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1097.553445] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1097.553594] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1097.553915] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1097.554144] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1097.554349] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1097.554630] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1097.554832] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1097.555169] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 
tempest-ServerDiskConfigTestJSON-1313847802-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1097.555415] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1097.555727] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1097.562303] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5de8c406-2c3c-4d77-b679-cce7f570f120 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.581035] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1097.581035] env[68285]: value = "task-2891961" [ 1097.581035] env[68285]: _type = "Task" [ 1097.581035] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.587053] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Releasing lock "refresh_cache-bb806297-47c6-45b7-a177-f3300fa1e29a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.587053] env[68285]: DEBUG nova.compute.manager [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1097.587053] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1097.587053] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ea756b-550b-48ac-baff-eeb8d9fa136e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.596603] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891961, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.599529] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1097.599735] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-493432cc-c775-4dc7-8291-de892554bfbf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.608072] env[68285]: DEBUG oslo_vmware.api [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1097.608072] env[68285]: value = "task-2891962" [ 1097.608072] env[68285]: _type = "Task" [ 1097.608072] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.619597] env[68285]: DEBUG oslo_vmware.api [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891962, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.646572] env[68285]: INFO nova.compute.manager [-] [instance: f2696c7f-5676-403a-87e0-fb0884866005] Took 1.38 seconds to deallocate network for instance. [ 1097.724458] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891960, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.852562] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "15fd3159-0fff-461d-96ce-f8cfc04eff32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.853342] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "15fd3159-0fff-461d-96ce-f8cfc04eff32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.871384] env[68285]: INFO nova.compute.manager [-] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Took 1.37 seconds to deallocate network for instance. 
[ 1097.902359] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.712s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.903617] env[68285]: DEBUG nova.compute.manager [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1097.908315] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 19.764s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.001591] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891959, 'name': PowerOnVM_Task} progress is 92%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.091328] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891961, 'name': ReconfigVM_Task, 'duration_secs': 0.37395} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.091644] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating instance '1f5fe064-0443-4b7f-911a-45d803836eeb' progress to 33 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1098.119455] env[68285]: DEBUG oslo_vmware.api [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891962, 'name': PowerOffVM_Task, 'duration_secs': 0.16941} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.119662] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1098.119879] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1098.120195] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8de8944d-8fdc-4b96-8309-e0040563ad00 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.148059] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1098.148059] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1098.148059] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Deleting the datastore file [datastore2] bb806297-47c6-45b7-a177-f3300fa1e29a {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1098.148059] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-979cb6a9-1c74-4301-a85b-ad5f4eb26fc8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.159793] env[68285]: DEBUG oslo_vmware.api [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for the task: (returnval){ [ 1098.159793] env[68285]: value = "task-2891964" [ 1098.159793] env[68285]: _type = "Task" [ 1098.159793] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.167797] env[68285]: DEBUG oslo_vmware.api [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891964, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.220879] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891960, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626532} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.221989] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] d025b807-fda4-4aff-beac-0ad6a092fe74/d025b807-fda4-4aff-beac-0ad6a092fe74.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1098.222344] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1098.222344] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e519c54c-0227-4d7c-bb04-7fad6e29fdf9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.232141] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1098.232141] env[68285]: value = "task-2891965" [ 1098.232141] env[68285]: _type = "Task" [ 1098.232141] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.241776] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891965, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.355836] env[68285]: DEBUG nova.compute.manager [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1098.380254] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.409411] env[68285]: DEBUG nova.compute.utils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1098.411899] env[68285]: DEBUG nova.compute.manager [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1098.411899] env[68285]: DEBUG nova.network.neutron [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1098.467458] env[68285]: DEBUG nova.policy [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11e000558c8a44878a90add053bc4a70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c212f8fe09c041209a51099ad3af16d5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1098.498924] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891959, 'name': PowerOnVM_Task, 'duration_secs': 1.389131} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.501650] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1098.501900] env[68285]: INFO nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Took 8.82 seconds to spawn the instance on the hypervisor. 
[ 1098.502115] env[68285]: DEBUG nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1098.503089] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a53158c-6d22-433c-a886-e01e51e2eb11 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.597795] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1098.598046] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1098.598210] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1098.598387] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1098.598535] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1098.598677] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1098.598871] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1098.599053] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1098.599230] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1098.599418] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1098.599598] env[68285]: DEBUG nova.virt.hardware [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1098.605950] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Reconfiguring VM instance instance-00000044 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1098.606366] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b294297-f756-4377-a1da-73580edf12e0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.630865] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1098.630865] env[68285]: value = "task-2891966" [ 1098.630865] env[68285]: _type = "Task" [ 1098.630865] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.639177] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891966, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.672189] env[68285]: DEBUG oslo_vmware.api [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Task: {'id': task-2891964, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18634} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.672444] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1098.672640] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1098.672817] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1098.672990] env[68285]: INFO nova.compute.manager [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1098.673772] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1098.673772] env[68285]: DEBUG nova.compute.manager [-] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1098.673772] env[68285]: DEBUG nova.network.neutron [-] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1098.679576] env[68285]: DEBUG oslo_concurrency.lockutils [None req-09d12e5b-67c2-4ac0-8064-6030536be7b4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "f2696c7f-5676-403a-87e0-fb0884866005" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.978s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.707869] env[68285]: DEBUG nova.network.neutron [-] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1098.743851] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891965, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073304} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.744187] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1098.745014] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c68da1d-2a96-4b78-978f-de49b3b4ee2e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.771794] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] d025b807-fda4-4aff-beac-0ad6a092fe74/d025b807-fda4-4aff-beac-0ad6a092fe74.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1098.775162] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e18c9bd-ebef-4686-86c0-1dd7d4bfb0e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.791795] env[68285]: DEBUG nova.network.neutron [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Successfully created port: c6e96dad-6c94-4213-96ce-d96ca6b80bbe {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1098.800153] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1098.800153] env[68285]: value = "task-2891967" [ 1098.800153] env[68285]: _type = "Task" [ 1098.800153] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.811279] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891967, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.865089] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8141b3f0-453a-4f34-9038-35ec677776c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.876179] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6afd20e-6d86-4df8-859d-2d9dac4240c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.882556] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.914069] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94113572-287a-478a-b08e-af36c44ab950 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.916519] env[68285]: DEBUG nova.compute.manager [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1098.924742] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4b27e7-5c25-469d-b69a-3dd5b9f6eccf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.941425] env[68285]: DEBUG nova.compute.provider_tree [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1099.020928] env[68285]: INFO nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Took 47.31 seconds to build instance. [ 1099.142065] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891966, 'name': ReconfigVM_Task, 'duration_secs': 0.215843} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.142521] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Reconfigured VM instance instance-00000044 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1099.143440] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96b06ba-3909-4139-bb2f-7a5c1976da1b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.178322] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 1f5fe064-0443-4b7f-911a-45d803836eeb/1f5fe064-0443-4b7f-911a-45d803836eeb.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1099.178804] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c7a475f-3497-43aa-b640-ef42143a2836 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.202665] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1099.202665] env[68285]: value = "task-2891968" [ 1099.202665] env[68285]: _type = "Task" [ 1099.202665] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.212245] env[68285]: DEBUG nova.network.neutron [-] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.213372] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891968, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.312729] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891967, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.445559] env[68285]: DEBUG nova.scheduler.client.report [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1099.523106] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "cbf2a387-8a5a-4400-833b-e04e23ca42f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.823s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.714618] env[68285]: INFO nova.compute.manager [-] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Took 1.04 seconds to deallocate network for instance. [ 1099.714971] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891968, 'name': ReconfigVM_Task, 'duration_secs': 0.390754} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.716643] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 1f5fe064-0443-4b7f-911a-45d803836eeb/1f5fe064-0443-4b7f-911a-45d803836eeb.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1099.716918] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating instance '1f5fe064-0443-4b7f-911a-45d803836eeb' progress to 50 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1099.814963] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891967, 'name': ReconfigVM_Task, 'duration_secs': 0.583426} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.815251] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Reconfigured VM instance instance-0000004c to attach disk [datastore1] d025b807-fda4-4aff-beac-0ad6a092fe74/d025b807-fda4-4aff-beac-0ad6a092fe74.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1099.815871] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cc14d831-0dcb-444c-aab1-d82d26f6dfbe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.822830] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1099.822830] env[68285]: value = "task-2891969" [ 1099.822830] env[68285]: _type = "Task" [ 1099.822830] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.831318] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891969, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.927192] env[68285]: DEBUG nova.compute.manager [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1099.950644] env[68285]: DEBUG nova.virt.hardware [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1099.950972] env[68285]: DEBUG nova.virt.hardware [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1099.951208] env[68285]: DEBUG nova.virt.hardware [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1099.951593] env[68285]: DEBUG nova.virt.hardware [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1099.951593] env[68285]: DEBUG nova.virt.hardware [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1099.951731] env[68285]: DEBUG nova.virt.hardware [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1099.951954] env[68285]: DEBUG nova.virt.hardware [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1099.952166] env[68285]: DEBUG nova.virt.hardware [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1099.952443] env[68285]: DEBUG nova.virt.hardware [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1099.952512] env[68285]: DEBUG nova.virt.hardware [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1099.952748] env[68285]: DEBUG nova.virt.hardware [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1099.958025] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc5447a-5a67-4269-a998-cac10ed79b24 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.967297] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8389def-ce27-462a-9787-b1e21077d161 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.226157] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.227093] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67eb7635-b3e5-4551-b422-f3ecf7108e76 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.249335] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bdb2b4-057f-4268-9649-38d4e57f250b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.268797] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating instance '1f5fe064-0443-4b7f-911a-45d803836eeb' progress to 67 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1100.275330] env[68285]: DEBUG nova.compute.manager [req-ac77e3b2-4e32-409f-a83b-c1fc8a158df9 req-c16e34cb-cd26-4b5e-a2ac-a81b2b289b84 service nova] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Received event network-vif-plugged-c6e96dad-6c94-4213-96ce-d96ca6b80bbe {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1100.275550] env[68285]: DEBUG oslo_concurrency.lockutils 
[req-ac77e3b2-4e32-409f-a83b-c1fc8a158df9 req-c16e34cb-cd26-4b5e-a2ac-a81b2b289b84 service nova] Acquiring lock "8ebbf943-2cef-4c99-a1c4-b1d213fd9884-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.275781] env[68285]: DEBUG oslo_concurrency.lockutils [req-ac77e3b2-4e32-409f-a83b-c1fc8a158df9 req-c16e34cb-cd26-4b5e-a2ac-a81b2b289b84 service nova] Lock "8ebbf943-2cef-4c99-a1c4-b1d213fd9884-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.275957] env[68285]: DEBUG oslo_concurrency.lockutils [req-ac77e3b2-4e32-409f-a83b-c1fc8a158df9 req-c16e34cb-cd26-4b5e-a2ac-a81b2b289b84 service nova] Lock "8ebbf943-2cef-4c99-a1c4-b1d213fd9884-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.276156] env[68285]: DEBUG nova.compute.manager [req-ac77e3b2-4e32-409f-a83b-c1fc8a158df9 req-c16e34cb-cd26-4b5e-a2ac-a81b2b289b84 service nova] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] No waiting events found dispatching network-vif-plugged-c6e96dad-6c94-4213-96ce-d96ca6b80bbe {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1100.276302] env[68285]: WARNING nova.compute.manager [req-ac77e3b2-4e32-409f-a83b-c1fc8a158df9 req-c16e34cb-cd26-4b5e-a2ac-a81b2b289b84 service nova] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Received unexpected event network-vif-plugged-c6e96dad-6c94-4213-96ce-d96ca6b80bbe for instance with vm_state building and task_state spawning. [ 1100.301636] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "3094ed52-33c2-40ff-ac77-6bb975a2f681" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.301856] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "3094ed52-33c2-40ff-ac77-6bb975a2f681" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.312148] env[68285]: DEBUG nova.network.neutron [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Successfully updated port: c6e96dad-6c94-4213-96ce-d96ca6b80bbe {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1100.334363] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891969, 'name': Rename_Task, 'duration_secs': 0.2194} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.334872] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1100.335146] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f0319a25-b1da-4347-b4ee-c57b9d38368c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.342256] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1100.342256] env[68285]: value = "task-2891970" [ 1100.342256] env[68285]: _type = "Task" [ 1100.342256] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.350525] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891970, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.463026] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.555s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.463282] env[68285]: DEBUG nova.compute.manager [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=68285) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1100.470257] env[68285]: DEBUG oslo_concurrency.lockutils [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.731s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.472472] env[68285]: INFO nova.compute.claims [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1100.808158] env[68285]: DEBUG nova.compute.manager [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1100.814879] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "refresh_cache-8ebbf943-2cef-4c99-a1c4-b1d213fd9884" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.815143] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquired lock "refresh_cache-8ebbf943-2cef-4c99-a1c4-b1d213fd9884" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.815378] env[68285]: DEBUG nova.network.neutron [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1100.821663] env[68285]: DEBUG nova.network.neutron [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Port 025d1a2b-ae65-4a5c-a90f-66fabc72e11c binding to destination host cpu-1 is already ACTIVE {{(pid=68285) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1100.853731] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891970, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.041765] env[68285]: INFO nova.scheduler.client.report [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleted allocation for migration e2bf8756-19fa-4126-b4c6-11b9b79ae81d [ 1101.332804] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.353194] env[68285]: DEBUG oslo_vmware.api [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891970, 'name': PowerOnVM_Task, 'duration_secs': 0.598183} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.353194] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1101.353529] env[68285]: INFO nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Took 9.13 seconds to spawn the instance on the hypervisor. [ 1101.353564] env[68285]: DEBUG nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1101.354381] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2d39b1-7bd0-4d97-af2f-1bb1d1a2463b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.357517] env[68285]: DEBUG nova.network.neutron [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1101.502544] env[68285]: DEBUG nova.network.neutron [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Updating instance_info_cache with network_info: [{"id": "c6e96dad-6c94-4213-96ce-d96ca6b80bbe", "address": "fa:16:3e:f2:f4:cc", "network": {"id": "29140596-472d-439e-878e-bfff12ffdf03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-909517823-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c212f8fe09c041209a51099ad3af16d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b89fd3b-0470-40c9-bb5b-d52c76c030e4", "external-id": "nsx-vlan-transportzone-276", "segmentation_id": 276, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6e96dad-6c", "ovs_interfaceid": "c6e96dad-6c94-4213-96ce-d96ca6b80bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.550581] env[68285]: DEBUG nova.objects.instance [None req-dc261265-0f97-4c28-b609-1b69c2cfeee2 tempest-ServerActionsTestOtherB-158934431 
tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'flavor' on Instance uuid 34aeba05-804e-444c-8e58-69c7721b10b1 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1101.552271] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ffacf6a8-de35-4099-8332-6e77fbdbff1c tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "34aeba05-804e-444c-8e58-69c7721b10b1" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 26.387s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.810623] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d09198-5ba1-4b79-b972-83fd9fc217c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.819124] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44938b1e-db64-4338-9540-e1660003ac4e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.863977] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af1db3a-7f73-4cb5-a376-f051f04e2ede {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.870907] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "1f5fe064-0443-4b7f-911a-45d803836eeb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.871204] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "1f5fe064-0443-4b7f-911a-45d803836eeb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.871361] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "1f5fe064-0443-4b7f-911a-45d803836eeb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.882688] env[68285]: INFO nova.compute.manager [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Took 50.13 seconds to build instance. 
[ 1101.884985] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312a6d42-9728-45e1-a355-f991edd3f9b7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.900056] env[68285]: DEBUG nova.compute.provider_tree [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.011324] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Releasing lock "refresh_cache-8ebbf943-2cef-4c99-a1c4-b1d213fd9884" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.011565] env[68285]: DEBUG nova.compute.manager [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Instance network_info: |[{"id": "c6e96dad-6c94-4213-96ce-d96ca6b80bbe", "address": "fa:16:3e:f2:f4:cc", "network": {"id": "29140596-472d-439e-878e-bfff12ffdf03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-909517823-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c212f8fe09c041209a51099ad3af16d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b89fd3b-0470-40c9-bb5b-d52c76c030e4", "external-id": "nsx-vlan-transportzone-276", "segmentation_id": 276, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6e96dad-6c", "ovs_interfaceid": "c6e96dad-6c94-4213-96ce-d96ca6b80bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1102.011953] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:f4:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b89fd3b-0470-40c9-bb5b-d52c76c030e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6e96dad-6c94-4213-96ce-d96ca6b80bbe', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1102.031208] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1102.031537] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1102.031868] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8c95323-0c18-4278-b371-349a20e96041 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.062548] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1102.062548] env[68285]: value = "task-2891971" [ 1102.062548] env[68285]: _type = "Task" [ 1102.062548] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.063265] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc261265-0f97-4c28-b609-1b69c2cfeee2 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.063265] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc261265-0f97-4c28-b609-1b69c2cfeee2 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.063528] env[68285]: DEBUG nova.network.neutron [None req-dc261265-0f97-4c28-b609-1b69c2cfeee2 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1102.063676] env[68285]: DEBUG nova.objects.instance [None req-dc261265-0f97-4c28-b609-1b69c2cfeee2 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'info_cache' on Instance uuid 34aeba05-804e-444c-8e58-69c7721b10b1 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.081194] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891971, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.392393] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc02b329-3e8e-4d76-8a55-69fb39354454 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "d025b807-fda4-4aff-beac-0ad6a092fe74" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.651s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.403735] env[68285]: DEBUG nova.scheduler.client.report [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1102.444829] env[68285]: DEBUG nova.compute.manager [req-23abcca6-5af9-4792-a22f-3d908b3506d2 req-1197270a-aaa2-4c40-a176-76e05011f8ad service nova] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Received event network-changed-c6e96dad-6c94-4213-96ce-d96ca6b80bbe {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1102.445052] env[68285]: DEBUG nova.compute.manager [req-23abcca6-5af9-4792-a22f-3d908b3506d2 req-1197270a-aaa2-4c40-a176-76e05011f8ad service nova] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Refreshing instance network info cache due to event network-changed-c6e96dad-6c94-4213-96ce-d96ca6b80bbe. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1102.449551] env[68285]: DEBUG oslo_concurrency.lockutils [req-23abcca6-5af9-4792-a22f-3d908b3506d2 req-1197270a-aaa2-4c40-a176-76e05011f8ad service nova] Acquiring lock "refresh_cache-8ebbf943-2cef-4c99-a1c4-b1d213fd9884" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.449551] env[68285]: DEBUG oslo_concurrency.lockutils [req-23abcca6-5af9-4792-a22f-3d908b3506d2 req-1197270a-aaa2-4c40-a176-76e05011f8ad service nova] Acquired lock "refresh_cache-8ebbf943-2cef-4c99-a1c4-b1d213fd9884" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.449551] env[68285]: DEBUG nova.network.neutron [req-23abcca6-5af9-4792-a22f-3d908b3506d2 req-1197270a-aaa2-4c40-a176-76e05011f8ad service nova] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Refreshing network info cache for port c6e96dad-6c94-4213-96ce-d96ca6b80bbe {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1102.576177] env[68285]: DEBUG nova.objects.base [None req-dc261265-0f97-4c28-b609-1b69c2cfeee2 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Object Instance<34aeba05-804e-444c-8e58-69c7721b10b1> lazy-loaded attributes: flavor,info_cache {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1102.581039] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891971, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.909352] env[68285]: DEBUG oslo_concurrency.lockutils [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.439s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.909807] env[68285]: DEBUG nova.compute.manager [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1102.914153] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.914153] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.914153] env[68285]: DEBUG nova.network.neutron [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1102.915114] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.327s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.916496] env[68285]: INFO nova.compute.claims [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1103.080053] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891971, 'name': CreateVM_Task, 'duration_secs': 0.544433} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.080053] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1103.080717] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.080905] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1103.081285] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1103.083692] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c923c77-7b11-4a9b-92b6-ac274d84f830 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.091314] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1103.091314] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5207dbff-4c4e-e84b-f2ed-626beb939f77" [ 1103.091314] env[68285]: _type = "Task" [ 1103.091314] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.099800] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5207dbff-4c4e-e84b-f2ed-626beb939f77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.252664] env[68285]: DEBUG nova.network.neutron [req-23abcca6-5af9-4792-a22f-3d908b3506d2 req-1197270a-aaa2-4c40-a176-76e05011f8ad service nova] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Updated VIF entry in instance network info cache for port c6e96dad-6c94-4213-96ce-d96ca6b80bbe. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1103.253134] env[68285]: DEBUG nova.network.neutron [req-23abcca6-5af9-4792-a22f-3d908b3506d2 req-1197270a-aaa2-4c40-a176-76e05011f8ad service nova] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Updating instance_info_cache with network_info: [{"id": "c6e96dad-6c94-4213-96ce-d96ca6b80bbe", "address": "fa:16:3e:f2:f4:cc", "network": {"id": "29140596-472d-439e-878e-bfff12ffdf03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-909517823-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c212f8fe09c041209a51099ad3af16d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b89fd3b-0470-40c9-bb5b-d52c76c030e4", "external-id": "nsx-vlan-transportzone-276", "segmentation_id": 276, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6e96dad-6c", "ovs_interfaceid": "c6e96dad-6c94-4213-96ce-d96ca6b80bbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.331116] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "cbf2a387-8a5a-4400-833b-e04e23ca42f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.331116] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "cbf2a387-8a5a-4400-833b-e04e23ca42f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.331116] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "cbf2a387-8a5a-4400-833b-e04e23ca42f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.331116] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "cbf2a387-8a5a-4400-833b-e04e23ca42f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.331443] env[68285]: DEBUG oslo_concurrency.lockutils [None 
req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "cbf2a387-8a5a-4400-833b-e04e23ca42f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.333750] env[68285]: INFO nova.compute.manager [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Terminating instance [ 1103.408478] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "d025b807-fda4-4aff-beac-0ad6a092fe74" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.408743] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "d025b807-fda4-4aff-beac-0ad6a092fe74" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.408945] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "d025b807-fda4-4aff-beac-0ad6a092fe74-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.409153] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "d025b807-fda4-4aff-beac-0ad6a092fe74-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.409417] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "d025b807-fda4-4aff-beac-0ad6a092fe74-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.412979] env[68285]: INFO nova.compute.manager [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Terminating instance [ 1103.424034] env[68285]: DEBUG nova.compute.utils [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:239}} [ 1103.427518] env[68285]: DEBUG nova.compute.manager [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Not allocating networking since 'none' was specified. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1103.602415] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5207dbff-4c4e-e84b-f2ed-626beb939f77, 'name': SearchDatastore_Task, 'duration_secs': 0.013312} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.605427] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.605676] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1103.605907] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.606068] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1103.606249] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1103.606614] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4aa48f9-75ac-4d57-9042-05af203d0650 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.623093] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Created directory 
with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1103.623093] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1103.623961] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3345089-d8ef-4751-8638-456c27eaaa37 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.632070] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1103.632070] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52bc63c0-8b40-e488-c737-9fb255ee29e6" [ 1103.632070] env[68285]: _type = "Task" [ 1103.632070] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.640982] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bc63c0-8b40-e488-c737-9fb255ee29e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.677153] env[68285]: DEBUG nova.network.neutron [None req-dc261265-0f97-4c28-b609-1b69c2cfeee2 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance_info_cache with network_info: [{"id": "b67172eb-4f98-4870-a433-22f6e238cbf4", "address": "fa:16:3e:69:33:45", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67172eb-4f", "ovs_interfaceid": "b67172eb-4f98-4870-a433-22f6e238cbf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.692708] env[68285]: DEBUG nova.network.neutron [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 
tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating instance_info_cache with network_info: [{"id": "025d1a2b-ae65-4a5c-a90f-66fabc72e11c", "address": "fa:16:3e:e1:f7:d0", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025d1a2b-ae", "ovs_interfaceid": "025d1a2b-ae65-4a5c-a90f-66fabc72e11c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.756799] env[68285]: DEBUG oslo_concurrency.lockutils [req-23abcca6-5af9-4792-a22f-3d908b3506d2 req-1197270a-aaa2-4c40-a176-76e05011f8ad service nova] Releasing lock "refresh_cache-8ebbf943-2cef-4c99-a1c4-b1d213fd9884" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.837776] env[68285]: DEBUG nova.compute.manager [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1103.838307] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1103.838974] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8789c88-1467-4c0e-8d03-af173c4ccc7c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.847613] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1103.847873] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af965789-1299-4bed-a8c8-99b7a503b4c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.855348] env[68285]: DEBUG oslo_vmware.api [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1103.855348] env[68285]: value = "task-2891972" [ 1103.855348] env[68285]: _type = "Task" [ 1103.855348] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.864146] env[68285]: DEBUG oslo_vmware.api [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891972, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.919113] env[68285]: DEBUG nova.compute.manager [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1103.919446] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1103.920432] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1bd4cd-2ba6-4b32-8809-981d1e1cd652 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.928085] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1103.928336] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b82c4a9-0a27-4cda-9ab4-c61f994cbf92 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.930207] env[68285]: DEBUG nova.compute.manager [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1103.941525] env[68285]: DEBUG oslo_vmware.api [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1103.941525] env[68285]: value = "task-2891973" [ 1103.941525] env[68285]: _type = "Task" [ 1103.941525] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.950980] env[68285]: DEBUG oslo_vmware.api [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891973, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.142948] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bc63c0-8b40-e488-c737-9fb255ee29e6, 'name': SearchDatastore_Task, 'duration_secs': 0.012077} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.146532] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-375177f7-38b6-4acb-9e8a-ec6bac40d982 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.152040] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1104.152040] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529600bb-d4f4-7c99-cb6e-71f3f2170c36" [ 1104.152040] env[68285]: _type = "Task" [ 1104.152040] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.162998] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529600bb-d4f4-7c99-cb6e-71f3f2170c36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.179871] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dc261265-0f97-4c28-b609-1b69c2cfeee2 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "refresh_cache-34aeba05-804e-444c-8e58-69c7721b10b1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.196582] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.240021] env[68285]: DEBUG oslo_vmware.rw_handles [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522630d4-9776-ee9d-bd78-dc4578df2cd0/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1104.240929] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae9df9e-d355-4114-b888-b25a0f7deb6b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.248440] env[68285]: DEBUG oslo_vmware.rw_handles [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522630d4-9776-ee9d-bd78-dc4578df2cd0/disk-0.vmdk is in state: ready. 
{{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1104.248680] env[68285]: ERROR oslo_vmware.rw_handles [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522630d4-9776-ee9d-bd78-dc4578df2cd0/disk-0.vmdk due to incomplete transfer. [ 1104.251522] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-227bc002-a6b5-4cf3-80da-d96e62b2ffd6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.258895] env[68285]: DEBUG oslo_vmware.rw_handles [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522630d4-9776-ee9d-bd78-dc4578df2cd0/disk-0.vmdk. {{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1104.259110] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Uploaded image 16df92d7-466a-491d-b247-71c140d9d824 to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1104.261370] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1104.261630] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-847888a0-445d-463f-bfd4-de3686371752 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.270350] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1104.270350] env[68285]: value = "task-2891974" [ 1104.270350] env[68285]: _type = "Task" [ 1104.270350] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.280668] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891974, 'name': Destroy_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.290860] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940b391f-dac8-482f-a699-4e4692df78b3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.297829] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-547fed7d-8b52-4842-a11e-ca8a738bdde7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.328838] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad731c93-56dc-49a1-91da-d9f1d2cd6bb6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.335798] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624a5cf2-f0e3-4c2a-a651-da7d44e5a5b4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.348451] env[68285]: DEBUG nova.compute.provider_tree [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.363795] env[68285]: DEBUG oslo_vmware.api [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891972, 'name': PowerOffVM_Task, 'duration_secs': 0.260264} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.364153] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1104.364411] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1104.364651] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-531f2312-1a86-4ecc-afa7-6186a33bf1dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.428627] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1104.428926] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1104.429145] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Deleting the datastore file [datastore2] cbf2a387-8a5a-4400-833b-e04e23ca42f7 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1104.429415] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a791c46-4f3f-4424-8cbc-69889e9e8d9b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.438972] env[68285]: DEBUG oslo_vmware.api [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1104.438972] env[68285]: value = "task-2891976" [ 1104.438972] env[68285]: _type = "Task" [ 1104.438972] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.450409] env[68285]: DEBUG oslo_vmware.api [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891976, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.453405] env[68285]: DEBUG oslo_vmware.api [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891973, 'name': PowerOffVM_Task, 'duration_secs': 0.22471} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.453912] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1104.453912] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1104.454052] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b549132e-4e83-42dd-909f-97c4acd0f310 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.544787] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1104.545030] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1104.545220] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Deleting the datastore file [datastore1] d025b807-fda4-4aff-beac-0ad6a092fe74 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1104.545489] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4459ad0f-6085-4e02-b5bf-618ff9578840 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.551717] env[68285]: DEBUG oslo_vmware.api [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for the task: (returnval){ [ 1104.551717] env[68285]: value = "task-2891978" [ 1104.551717] env[68285]: _type = "Task" [ 1104.551717] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.560374] env[68285]: DEBUG oslo_vmware.api [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891978, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.663610] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529600bb-d4f4-7c99-cb6e-71f3f2170c36, 'name': SearchDatastore_Task, 'duration_secs': 0.012214} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.663885] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.664182] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8ebbf943-2cef-4c99-a1c4-b1d213fd9884/8ebbf943-2cef-4c99-a1c4-b1d213fd9884.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1104.664494] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-acb25303-6f41-4647-9358-99e3ae4e3000 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.671331] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1104.671331] env[68285]: value = "task-2891979" [ 1104.671331] env[68285]: _type = "Task" [ 1104.671331] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.680071] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891979, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.723152] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febcbf15-f577-4edd-8eed-c912832f04dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.743712] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575f5925-0e2f-44d4-a411-68248585c801 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.750775] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating instance '1f5fe064-0443-4b7f-911a-45d803836eeb' progress to 83 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1104.780494] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891974, 'name': Destroy_Task} progress is 33%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.851587] env[68285]: DEBUG nova.scheduler.client.report [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1104.942098] env[68285]: DEBUG nova.compute.manager [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1104.952430] env[68285]: DEBUG oslo_vmware.api [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891976, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328615} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.952544] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1104.952812] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1104.953016] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1104.953208] env[68285]: INFO nova.compute.manager [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1104.953465] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1104.953666] env[68285]: DEBUG nova.compute.manager [-] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1104.953779] env[68285]: DEBUG nova.network.neutron [-] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1104.966964] env[68285]: DEBUG nova.virt.hardware [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1104.967706] env[68285]: DEBUG nova.virt.hardware [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1104.967876] env[68285]: DEBUG nova.virt.hardware [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1104.968075] env[68285]: DEBUG nova.virt.hardware [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1104.968240] env[68285]: DEBUG nova.virt.hardware [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1104.968548] env[68285]: DEBUG nova.virt.hardware [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1104.968805] env[68285]: DEBUG nova.virt.hardware [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1104.968967] env[68285]: DEBUG nova.virt.hardware [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1104.969149] env[68285]: DEBUG nova.virt.hardware [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1104.969322] env[68285]: DEBUG nova.virt.hardware [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1104.969492] env[68285]: DEBUG nova.virt.hardware [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1104.970772] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9145bd-f043-488f-b4f2-bf11abb0a558 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.979049] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b17faba-cfd3-4ebc-8501-e7d0b9d6019e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.992943] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1104.998454] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Creating folder: Project (c00201a5d33145d08ba77ed4f875c960). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1104.999176] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65c11fa5-9255-47e0-85c5-9c55682fc71a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.009613] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Created folder: Project (c00201a5d33145d08ba77ed4f875c960) in parent group-v580775. [ 1105.009613] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Creating folder: Instances. Parent ref: group-v580994. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1105.009613] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c59f3eb-2652-46a2-8047-6c61e687dcf7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.018648] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Created folder: Instances in parent group-v580994. [ 1105.018912] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1105.019130] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a040977-b57e-4b67-b259-065b788141de] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1105.019605] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ea692e7-f8e5-4c3d-baab-0df4c6af4580 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.038580] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1105.038580] env[68285]: value = "task-2891982" [ 1105.038580] env[68285]: _type = "Task" [ 1105.038580] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.047108] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891982, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.060859] env[68285]: DEBUG oslo_vmware.api [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Task: {'id': task-2891978, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242001} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.061133] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1105.061325] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1105.061493] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1105.061742] env[68285]: INFO nova.compute.manager [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1105.061903] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1105.062144] env[68285]: DEBUG nova.compute.manager [-] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1105.062237] env[68285]: DEBUG nova.network.neutron [-] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1105.182676] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891979, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.187442] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc261265-0f97-4c28-b609-1b69c2cfeee2 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1105.187781] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74cd548c-5ddc-4423-8c48-c6d361e42b65 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.194568] env[68285]: DEBUG oslo_vmware.api [None req-dc261265-0f97-4c28-b609-1b69c2cfeee2 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1105.194568] env[68285]: value = "task-2891983" [ 1105.194568] env[68285]: _type = "Task" [ 1105.194568] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.205053] env[68285]: DEBUG oslo_vmware.api [None req-dc261265-0f97-4c28-b609-1b69c2cfeee2 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891983, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.257751] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1105.260154] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4364a47-4396-4d65-83ee-852552f564f5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.265445] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1105.265445] env[68285]: value = "task-2891984" [ 1105.265445] env[68285]: _type = "Task" [ 1105.265445] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.279278] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891984, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.285992] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891974, 'name': Destroy_Task, 'duration_secs': 0.61073} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.287131] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Destroyed the VM [ 1105.287856] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1105.288664] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-24808489-8ec2-47c5-b0b0-2e02a167e6b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.295238] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1105.295238] env[68285]: value = "task-2891985" [ 1105.295238] env[68285]: _type = "Task" [ 1105.295238] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.305124] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891985, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.307765] env[68285]: DEBUG nova.compute.manager [req-55d68958-f059-4863-906c-29e03b577681 req-aaefaf1c-3907-4ba0-a157-94a66b7b1ffc service nova] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Received event network-vif-deleted-e4eda400-b7bd-4283-bcbd-ff116b289b03 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1105.307866] env[68285]: INFO nova.compute.manager [req-55d68958-f059-4863-906c-29e03b577681 req-aaefaf1c-3907-4ba0-a157-94a66b7b1ffc service nova] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Neutron deleted interface e4eda400-b7bd-4283-bcbd-ff116b289b03; detaching it from the instance and deleting it from the info cache [ 1105.308060] env[68285]: DEBUG nova.network.neutron [req-55d68958-f059-4863-906c-29e03b577681 req-aaefaf1c-3907-4ba0-a157-94a66b7b1ffc service nova] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.356932] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.442s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.357524] env[68285]: DEBUG nova.compute.manager [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1105.360590] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.983s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.360831] env[68285]: DEBUG nova.objects.instance [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lazy-loading 'resources' on Instance uuid a97df3d2-c182-46d8-95c2-61caccade285 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1105.434074] env[68285]: DEBUG nova.compute.manager [req-0b5a22b4-24f2-45a7-8ef6-397421fb1170 req-424cd95f-0708-466e-8465-3259f805ed79 service nova] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Received event network-vif-deleted-ea8eb01e-5384-4256-a1e0-75cf523ec6b9 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1105.434325] env[68285]: INFO nova.compute.manager [req-0b5a22b4-24f2-45a7-8ef6-397421fb1170 req-424cd95f-0708-466e-8465-3259f805ed79 service nova] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Neutron deleted interface ea8eb01e-5384-4256-a1e0-75cf523ec6b9; detaching it from the instance and deleting it from the info cache [ 1105.434507] env[68285]: DEBUG nova.network.neutron [req-0b5a22b4-24f2-45a7-8ef6-397421fb1170 req-424cd95f-0708-466e-8465-3259f805ed79 service nova] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.548257] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891982, 'name': CreateVM_Task, 'duration_secs': 0.345781} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.548467] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a040977-b57e-4b67-b259-065b788141de] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1105.548856] env[68285]: DEBUG oslo_concurrency.lockutils [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.549287] env[68285]: DEBUG oslo_concurrency.lockutils [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.549344] env[68285]: DEBUG oslo_concurrency.lockutils [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1105.549686] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ca5e9ac-4cf5-4f33-9547-6e45ae17e366 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.553954] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1105.553954] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52fc2dc9-b6fd-c1e4-dc67-0bf428ddc707" [ 1105.553954] env[68285]: _type = "Task" [ 1105.553954] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.561689] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52fc2dc9-b6fd-c1e4-dc67-0bf428ddc707, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.686315] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891979, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566039} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.686697] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8ebbf943-2cef-4c99-a1c4-b1d213fd9884/8ebbf943-2cef-4c99-a1c4-b1d213fd9884.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1105.686992] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1105.687406] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f7830ee-b053-4629-b4c2-08af9ccfe3ce {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.696032] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1105.696032] env[68285]: value = "task-2891986" [ 1105.696032] env[68285]: _type = "Task" [ 1105.696032] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.707223] env[68285]: DEBUG oslo_vmware.api [None req-dc261265-0f97-4c28-b609-1b69c2cfeee2 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891983, 'name': PowerOnVM_Task, 'duration_secs': 0.511448} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.710185] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc261265-0f97-4c28-b609-1b69c2cfeee2 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1105.710432] env[68285]: DEBUG nova.compute.manager [None req-dc261265-0f97-4c28-b609-1b69c2cfeee2 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1105.710744] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891986, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.712056] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e1b483-7f9a-476e-b792-2a8575c1f77f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.771751] env[68285]: DEBUG nova.network.neutron [-] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.778768] env[68285]: DEBUG oslo_vmware.api [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2891984, 'name': PowerOnVM_Task, 'duration_secs': 0.469204} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.779730] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1105.779730] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-55aa96f5-a2db-4315-aa91-ca1a1aee01d8 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating instance '1f5fe064-0443-4b7f-911a-45d803836eeb' progress to 100 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1105.805577] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891985, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.810300] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9619b38d-769b-402f-861b-33dd79db901f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.818309] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed09372-4a93-4b5d-98e4-fed9d876699e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.850729] env[68285]: DEBUG nova.compute.manager [req-55d68958-f059-4863-906c-29e03b577681 req-aaefaf1c-3907-4ba0-a157-94a66b7b1ffc service nova] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Detach interface failed, port_id=e4eda400-b7bd-4283-bcbd-ff116b289b03, reason: Instance cbf2a387-8a5a-4400-833b-e04e23ca42f7 could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1105.864330] env[68285]: DEBUG nova.compute.utils [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1105.868890] env[68285]: DEBUG nova.compute.manager [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Not allocating networking since 'none' was specified. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1105.915855] env[68285]: DEBUG nova.network.neutron [-] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.936927] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-92e054d6-ce66-47c5-92ba-15e6bb05f60d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.950412] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d52588-1c96-44a2-83b8-ac1a2358cf3b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.989701] env[68285]: DEBUG nova.compute.manager [req-0b5a22b4-24f2-45a7-8ef6-397421fb1170 req-424cd95f-0708-466e-8465-3259f805ed79 service nova] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Detach interface failed, port_id=ea8eb01e-5384-4256-a1e0-75cf523ec6b9, reason: Instance d025b807-fda4-4aff-beac-0ad6a092fe74 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1106.067112] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52fc2dc9-b6fd-c1e4-dc67-0bf428ddc707, 'name': SearchDatastore_Task, 'duration_secs': 0.023499} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.069900] env[68285]: DEBUG oslo_concurrency.lockutils [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.070205] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1106.070474] env[68285]: DEBUG oslo_concurrency.lockutils [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.070651] env[68285]: DEBUG oslo_concurrency.lockutils [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.070883] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1106.071581] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f34ccfb9-a06d-48cd-8ebb-5404ded59dca {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.086677] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1106.086677] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1106.086677] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-382b7855-c161-4ad1-916d-964c26840774 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.092102] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1106.092102] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527c13b7-b2d5-cec8-da38-90d98d68765b" [ 1106.092102] env[68285]: _type = "Task" [ 1106.092102] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.103802] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527c13b7-b2d5-cec8-da38-90d98d68765b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.156509] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "ce780600-5dc9-4a60-b54e-415cd1766ffb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.156737] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "ce780600-5dc9-4a60-b54e-415cd1766ffb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.209990] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891986, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115583} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.210284] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1106.211095] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c34c67f-5751-4932-b55b-2751467b1833 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.237307] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 8ebbf943-2cef-4c99-a1c4-b1d213fd9884/8ebbf943-2cef-4c99-a1c4-b1d213fd9884.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1106.241820] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1d4e054-02a4-4b64-8c5e-b48e0415963e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.263293] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1106.263293] env[68285]: value = "task-2891987" [ 1106.263293] env[68285]: _type = "Task" [ 1106.263293] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.269205] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cdf3f75-2467-4eaf-9596-fec6494f7a3a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.276238] env[68285]: INFO nova.compute.manager [-] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Took 1.32 seconds to deallocate network for instance. [ 1106.281734] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0606178a-2a60-480f-9a68-de778ca33008 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.285466] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891987, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.325415] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25560ed8-f925-4dd0-8ad5-42a594c96c06 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.335995] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891985, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.337435] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382b75ec-6a60-494e-b061-39ba9d1e11c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.351788] env[68285]: DEBUG nova.compute.provider_tree [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1106.368959] env[68285]: DEBUG nova.compute.manager [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1106.418465] env[68285]: INFO nova.compute.manager [-] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Took 1.36 seconds to deallocate network for instance. 
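Editor's note: the records above show the image-cache handling pattern repeated throughout this run: a lock named after the cached VMDK path ("[datastore1] devstack-image-cache_base/<image>/<image>.vmdk") is acquired before the cached disk is searched/copied into the instance folder and released afterwards. The following is only a minimal illustrative sketch of that pattern, not Nova's actual implementation; copy_virtual_disk is a hypothetical stand-in for the vCenter CopyVirtualDisk_Task call.

    # Sketch only: serialize work on a cached image VMDK the way the log above does,
    # by holding a lock named after the datastore path while the copy runs.
    from oslo_concurrency import lockutils

    def copy_from_image_cache(image_id, instance_uuid,
                              datastore="datastore1",
                              copy_virtual_disk=print):  # stand-in for CopyVirtualDisk_Task
        cache_vmdk = (f"[{datastore}] devstack-image-cache_base/"
                      f"{image_id}/{image_id}.vmdk")
        instance_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

        # Per-image lock so concurrent spawns do not race on the same cache entry.
        with lockutils.lock(cache_vmdk):
            copy_virtual_disk(cache_vmdk, instance_vmdk)

        return instance_vmdk
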
[ 1106.556529] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "34aeba05-804e-444c-8e58-69c7721b10b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.556818] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "34aeba05-804e-444c-8e58-69c7721b10b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.557053] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "34aeba05-804e-444c-8e58-69c7721b10b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.557244] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "34aeba05-804e-444c-8e58-69c7721b10b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.557466] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "34aeba05-804e-444c-8e58-69c7721b10b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.559978] env[68285]: INFO nova.compute.manager [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Terminating instance [ 1106.602568] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527c13b7-b2d5-cec8-da38-90d98d68765b, 'name': SearchDatastore_Task, 'duration_secs': 0.017528} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.603356] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ff8a9f5-71ea-489f-9ec5-749a43990af1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.611186] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1106.611186] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523faff0-33a4-063b-6773-5938ba290dec" [ 1106.611186] env[68285]: _type = "Task" [ 1106.611186] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.618770] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523faff0-33a4-063b-6773-5938ba290dec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.658851] env[68285]: DEBUG nova.compute.manager [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1106.773498] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891987, 'name': ReconfigVM_Task, 'duration_secs': 0.268412} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.773735] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 8ebbf943-2cef-4c99-a1c4-b1d213fd9884/8ebbf943-2cef-4c99-a1c4-b1d213fd9884.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1106.774465] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03467819-511b-4a66-bd10-834618c0d5d6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.782298] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1106.782298] env[68285]: value = "task-2891988" [ 1106.782298] env[68285]: _type = "Task" [ 1106.782298] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.790505] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.790770] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891988, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.830846] env[68285]: DEBUG oslo_vmware.api [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2891985, 'name': RemoveSnapshot_Task, 'duration_secs': 1.14323} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.831144] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1106.831426] env[68285]: INFO nova.compute.manager [None req-cc190bb5-0add-4f7e-926c-dd77d43c1a8e tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Took 15.46 seconds to snapshot the instance on the hypervisor. [ 1106.855167] env[68285]: DEBUG nova.scheduler.client.report [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1106.925330] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.064139] env[68285]: DEBUG nova.compute.manager [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1107.064391] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1107.065316] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a575809d-4570-4992-8aa6-9b2cbaff574e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.073346] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1107.073601] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-971ed536-3915-4abc-bd42-07abbbb343b3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.079806] env[68285]: DEBUG oslo_vmware.api [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1107.079806] env[68285]: value = "task-2891989" [ 1107.079806] env[68285]: _type = "Task" [ 1107.079806] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.088165] env[68285]: DEBUG oslo_vmware.api [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891989, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.122409] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523faff0-33a4-063b-6773-5938ba290dec, 'name': SearchDatastore_Task, 'duration_secs': 0.021149} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.122691] env[68285]: DEBUG oslo_concurrency.lockutils [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.123073] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 1a040977-b57e-4b67-b259-065b788141de/1a040977-b57e-4b67-b259-065b788141de.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1107.123768] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-464dc726-7e36-4c25-b9dd-a14507cdea4b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.129920] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1107.129920] env[68285]: value = "task-2891990" [ 1107.129920] env[68285]: _type = "Task" [ 1107.129920] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.138264] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2891990, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.183186] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.293178] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891988, 'name': Rename_Task, 'duration_secs': 0.150853} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.293531] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1107.293706] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a73c2881-aee5-4f4a-b3f6-9eea50fdd28e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.306157] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1107.306157] env[68285]: value = "task-2891991" [ 1107.306157] env[68285]: _type = "Task" [ 1107.306157] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.314309] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891991, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.360719] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.364163] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.610s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.366653] env[68285]: INFO nova.compute.claims [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1107.377714] env[68285]: DEBUG nova.compute.manager [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1107.381245] env[68285]: INFO nova.scheduler.client.report [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Deleted allocations for instance a97df3d2-c182-46d8-95c2-61caccade285 [ 1107.410752] env[68285]: DEBUG nova.virt.hardware [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1107.411087] env[68285]: DEBUG nova.virt.hardware [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1107.411328] env[68285]: DEBUG nova.virt.hardware [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1107.411570] env[68285]: DEBUG nova.virt.hardware [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1107.411763] env[68285]: DEBUG nova.virt.hardware [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1107.412020] env[68285]: DEBUG nova.virt.hardware [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1107.412292] env[68285]: DEBUG nova.virt.hardware [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1107.412512] env[68285]: DEBUG nova.virt.hardware [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 
tempest-ServerShowV247Test-1890882288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1107.412746] env[68285]: DEBUG nova.virt.hardware [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1107.412954] env[68285]: DEBUG nova.virt.hardware [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1107.413197] env[68285]: DEBUG nova.virt.hardware [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1107.414495] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6a6c7d-6806-43a7-ab7e-8c1d0d5aa302 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.423088] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d008984b-06f4-4643-8acb-ae05fc8bc46d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.438051] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1107.444408] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1107.445454] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1107.445693] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8fd3f56-143b-4ab0-b851-f355b5913070 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.464037] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1107.464037] env[68285]: value = "task-2891992" [ 1107.464037] env[68285]: _type = "Task" [ 1107.464037] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.472392] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891992, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.590359] env[68285]: DEBUG oslo_vmware.api [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891989, 'name': PowerOffVM_Task, 'duration_secs': 0.30057} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.590679] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1107.590870] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1107.591193] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2b57a7f-c60f-420f-8336-3374cecca285 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.642010] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2891990, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.682372] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1107.682693] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1107.682931] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleting the datastore file [datastore1] 34aeba05-804e-444c-8e58-69c7721b10b1 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1107.683331] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a64c88ef-7301-4be6-961a-a0169ceceacd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.693940] env[68285]: DEBUG oslo_vmware.api [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1107.693940] env[68285]: value = "task-2891994" [ 1107.693940] env[68285]: _type = 
"Task" [ 1107.693940] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.702723] env[68285]: DEBUG oslo_vmware.api [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891994, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.816056] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891991, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.875184] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "1f5fe064-0443-4b7f-911a-45d803836eeb" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.876164] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "1f5fe064-0443-4b7f-911a-45d803836eeb" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.876164] env[68285]: DEBUG nova.compute.manager [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Going to confirm migration 4 {{(pid=68285) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1107.892101] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a9354680-7a91-468c-905d-0812e705c110 tempest-MigrationsAdminTest-1593159837 tempest-MigrationsAdminTest-1593159837-project-member] Lock "a97df3d2-c182-46d8-95c2-61caccade285" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.210s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.973910] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891992, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.141336] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2891990, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577456} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.141624] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 1a040977-b57e-4b67-b259-065b788141de/1a040977-b57e-4b67-b259-065b788141de.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1108.141842] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1108.142146] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cb21d822-b9b9-4dbd-bfe7-e3e027b37b05 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.148500] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1108.148500] env[68285]: value = "task-2891995" [ 1108.148500] env[68285]: _type = "Task" [ 1108.148500] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.156879] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2891995, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.202367] env[68285]: DEBUG oslo_vmware.api [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2891994, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195155} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.202636] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1108.202816] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1108.203016] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1108.203200] env[68285]: INFO nova.compute.manager [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1108.203434] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1108.203619] env[68285]: DEBUG nova.compute.manager [-] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1108.203731] env[68285]: DEBUG nova.network.neutron [-] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1108.315841] env[68285]: DEBUG oslo_vmware.api [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891991, 'name': PowerOnVM_Task, 'duration_secs': 0.611562} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.316158] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1108.316521] env[68285]: INFO nova.compute.manager [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Took 8.39 seconds to spawn the instance on the hypervisor. 
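Editor's note: the "progress is N%" / "completed successfully" lines above come from oslo_vmware's task polling (wait_for_task/_poll_task in oslo_vmware/api.py). The snippet below is a simplified, stand-alone approximation of that loop for illustration only; get_task_info is a hypothetical callable assumed to return an object with state, progress, and error attributes, and the real library wraps this in a looping call with retries.

    # Sketch only: poll a vCenter task reference until it succeeds or errors,
    # logging progress the way the records above do.
    import time

    def wait_for_vcenter_task(get_task_info, task_ref,
                              poll_interval=0.5, log=print):
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                log(f"Task {task_ref} completed successfully.")
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_ref} failed: {info.error}")
            log(f"Task {task_ref} progress is {info.progress}%.")
            time.sleep(poll_interval)
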
[ 1108.316630] env[68285]: DEBUG nova.compute.manager [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1108.317444] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a11abb-dbfe-436a-8fc0-b3f412835420 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.456267] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.456456] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.456634] env[68285]: DEBUG nova.network.neutron [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1108.457602] env[68285]: DEBUG nova.objects.instance [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lazy-loading 'info_cache' on Instance uuid 1f5fe064-0443-4b7f-911a-45d803836eeb {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1108.475934] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891992, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.658266] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2891995, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060279} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.660807] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1108.661870] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f47219f-420e-41fe-b046-345069159789 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.681846] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 1a040977-b57e-4b67-b259-065b788141de/1a040977-b57e-4b67-b259-065b788141de.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1108.684311] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b601758a-5661-4332-a5eb-cc98105682a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.704074] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1108.704074] env[68285]: value = "task-2891996" [ 1108.704074] env[68285]: _type = "Task" [ 1108.704074] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.713728] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2891996, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.803460] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb771381-64fb-45cc-bd6d-452717cee7ed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.811389] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84253eb8-051d-4f00-b84d-570e2c4778bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.853476] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78caebfb-bdae-4024-bd46-5aa1ca8a9067 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.855285] env[68285]: INFO nova.compute.manager [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Took 36.25 seconds to build instance. 
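Editor's note: the teardown of instance 34aeba05-804e-444c-8e58-69c7721b10b1 above follows a fixed order: power off (PowerOffVM_Task), unregister (UnregisterVM), delete the instance's datastore directory (DeleteDatastoreFile_Task), then deallocate networking, all while holding the per-instance lock. The sketch below only mirrors that ordering under stated assumptions; vm_ops and network_api are hypothetical collaborators, not Nova's real objects.

    # Sketch only: the destroy ordering visible in the log, serialized on the
    # instance UUID. Each call is a placeholder for the corresponding
    # vCenter/Neutron operation named in the comment.
    from oslo_concurrency import lockutils

    def terminate_instance(instance_uuid, vm_ops, network_api,
                           datastore="datastore1"):
        with lockutils.lock(instance_uuid):
            vm_ops.power_off(instance_uuid)              # PowerOffVM_Task
            vm_ops.unregister(instance_uuid)             # UnregisterVM
            vm_ops.delete_datastore_dir(                 # DeleteDatastoreFile_Task
                f"[{datastore}] {instance_uuid}")
            network_api.deallocate_for_instance(instance_uuid)
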
[ 1108.861167] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1c1eb6-75d3-4e92-bd7b-abfa1f012b6c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.876352] env[68285]: DEBUG nova.compute.provider_tree [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.914740] env[68285]: DEBUG nova.compute.manager [req-78bfd96e-16ad-40c2-866e-fc02126f6133 req-7ad86823-e9c6-42ec-9395-4ac0638954fd service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Received event network-vif-deleted-b67172eb-4f98-4870-a433-22f6e238cbf4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1108.914904] env[68285]: INFO nova.compute.manager [req-78bfd96e-16ad-40c2-866e-fc02126f6133 req-7ad86823-e9c6-42ec-9395-4ac0638954fd service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Neutron deleted interface b67172eb-4f98-4870-a433-22f6e238cbf4; detaching it from the instance and deleting it from the info cache [ 1108.915041] env[68285]: DEBUG nova.network.neutron [req-78bfd96e-16ad-40c2-866e-fc02126f6133 req-7ad86823-e9c6-42ec-9395-4ac0638954fd service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.978151] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2891992, 'name': CreateVM_Task, 'duration_secs': 1.413158} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.978151] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1108.978151] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.978151] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.978151] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1108.978151] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de0f5b74-94d8-44b4-afb5-008e73e279d8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.983253] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1108.983253] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5272c3f3-7cb0-6c64-724b-9e9fbcd0da8b" [ 1108.983253] env[68285]: _type = "Task" [ 1108.983253] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.995513] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5272c3f3-7cb0-6c64-724b-9e9fbcd0da8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.106592] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "8ebbf943-2cef-4c99-a1c4-b1d213fd9884" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.214271] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2891996, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.360949] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0e3c0096-a264-4d2c-9a96-2eb56520c5ad tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "8ebbf943-2cef-4c99-a1c4-b1d213fd9884" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.758s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.360949] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "8ebbf943-2cef-4c99-a1c4-b1d213fd9884" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.254s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.360949] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "8ebbf943-2cef-4c99-a1c4-b1d213fd9884-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.361326] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "8ebbf943-2cef-4c99-a1c4-b1d213fd9884-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.361326] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "8ebbf943-2cef-4c99-a1c4-b1d213fd9884-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.363971] env[68285]: INFO nova.compute.manager [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Terminating instance [ 1109.366296] env[68285]: DEBUG nova.network.neutron [-] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.382456] env[68285]: DEBUG nova.scheduler.client.report [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1109.420531] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-40482f88-e839-401d-a81c-c6ebd056326a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.429867] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26923a7-c13c-4777-a369-8e04f2b1d675 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.465076] env[68285]: DEBUG nova.compute.manager [req-78bfd96e-16ad-40c2-866e-fc02126f6133 req-7ad86823-e9c6-42ec-9395-4ac0638954fd service nova] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Detach interface failed, port_id=b67172eb-4f98-4870-a433-22f6e238cbf4, reason: Instance 34aeba05-804e-444c-8e58-69c7721b10b1 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1109.493948] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5272c3f3-7cb0-6c64-724b-9e9fbcd0da8b, 'name': SearchDatastore_Task, 'duration_secs': 0.048325} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.494328] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.494600] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1109.494866] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.495054] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.495279] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 
tempest-ServerShowV247Test-1890882288-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1109.495566] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11d0463e-64a7-4c65-ae14-7b6cadbd1c13 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.503479] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1109.503662] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1109.504365] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c24b816-95a7-44dc-9985-1589222313dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.511804] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1109.511804] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524b6ca3-a72a-7e20-c631-1c6b03bb84be" [ 1109.511804] env[68285]: _type = "Task" [ 1109.511804] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.520703] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524b6ca3-a72a-7e20-c631-1c6b03bb84be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.717052] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2891996, 'name': ReconfigVM_Task, 'duration_secs': 0.79676} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.718158] env[68285]: DEBUG nova.network.neutron [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating instance_info_cache with network_info: [{"id": "025d1a2b-ae65-4a5c-a90f-66fabc72e11c", "address": "fa:16:3e:e1:f7:d0", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025d1a2b-ae", "ovs_interfaceid": "025d1a2b-ae65-4a5c-a90f-66fabc72e11c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.719443] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 1a040977-b57e-4b67-b259-065b788141de/1a040977-b57e-4b67-b259-065b788141de.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1109.720815] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8b724a99-9124-497c-ad0c-999247976270 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.727602] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1109.727602] env[68285]: value = "task-2891997" [ 1109.727602] env[68285]: _type = "Task" [ 1109.727602] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.736868] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2891997, 'name': Rename_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.867864] env[68285]: DEBUG nova.compute.manager [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1109.867864] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1109.868011] env[68285]: INFO nova.compute.manager [-] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Took 1.66 seconds to deallocate network for instance. [ 1109.868717] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2b3d7d-d002-4479-8e4a-628c6f958a99 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.878416] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1109.878847] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0e5913d-922e-4fda-8056-06398e6a9994 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.884876] env[68285]: DEBUG oslo_vmware.api [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1109.884876] env[68285]: value = "task-2891998" [ 1109.884876] env[68285]: _type = "Task" [ 1109.884876] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.888464] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.525s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.888802] env[68285]: DEBUG nova.compute.manager [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1109.892205] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.138s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.893881] env[68285]: INFO nova.compute.claims [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1109.908016] env[68285]: DEBUG oslo_vmware.api [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891998, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.022858] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524b6ca3-a72a-7e20-c631-1c6b03bb84be, 'name': SearchDatastore_Task, 'duration_secs': 0.009028} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.023671] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76a7de30-31b0-4ce6-b5de-2e051698578d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.029832] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1110.029832] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52bbbfe1-4ec3-632d-4b65-3f79f470050c" [ 1110.029832] env[68285]: _type = "Task" [ 1110.029832] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.038982] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bbbfe1-4ec3-632d-4b65-3f79f470050c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.222191] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "refresh_cache-1f5fe064-0443-4b7f-911a-45d803836eeb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.222596] env[68285]: DEBUG nova.objects.instance [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lazy-loading 'migration_context' on Instance uuid 1f5fe064-0443-4b7f-911a-45d803836eeb {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1110.241084] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2891997, 'name': Rename_Task, 'duration_secs': 0.178425} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.241428] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1110.241729] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d0ce744-e9de-4e4f-8431-51e6efd2e0a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.249058] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1110.249058] env[68285]: value = "task-2891999" [ 1110.249058] env[68285]: _type = "Task" [ 1110.249058] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.258905] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2891999, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.380884] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.394858] env[68285]: DEBUG oslo_vmware.api [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2891998, 'name': PowerOffVM_Task, 'duration_secs': 0.183016} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.395135] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1110.395304] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1110.395553] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a79baf3b-a84b-4cf6-935f-8bb2f9f41cd4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.398284] env[68285]: DEBUG nova.compute.utils [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1110.401729] env[68285]: DEBUG nova.compute.manager [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1110.401729] env[68285]: DEBUG nova.network.neutron [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1110.452715] env[68285]: DEBUG nova.policy [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27f1f7ed24e54a15aa56d650914ca830', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e2e033b9e69480c92c3010c4899a04a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1110.462732] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1110.462947] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 
8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1110.463150] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Deleting the datastore file [datastore1] 8ebbf943-2cef-4c99-a1c4-b1d213fd9884 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1110.463423] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a42eb70-f359-4663-9af9-3647f2cb72af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.470729] env[68285]: DEBUG oslo_vmware.api [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for the task: (returnval){ [ 1110.470729] env[68285]: value = "task-2892001" [ 1110.470729] env[68285]: _type = "Task" [ 1110.470729] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.480543] env[68285]: DEBUG oslo_vmware.api [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2892001, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.542259] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bbbfe1-4ec3-632d-4b65-3f79f470050c, 'name': SearchDatastore_Task, 'duration_secs': 0.010415} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.542556] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.542859] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8c299247-896d-4ff1-b73a-22a71ec972fd/8c299247-896d-4ff1-b73a-22a71ec972fd.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1110.543167] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14541451-68b5-427f-9a96-5a876b65d92b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.549826] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1110.549826] env[68285]: value = "task-2892002" [ 1110.549826] env[68285]: _type = "Task" [ 1110.549826] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.557395] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892002, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.725517] env[68285]: DEBUG nova.objects.base [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Object Instance<1f5fe064-0443-4b7f-911a-45d803836eeb> lazy-loaded attributes: info_cache,migration_context {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1110.726579] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27d15d0-8dff-46e3-8dc4-fccf14bac7dc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.748965] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66082837-204c-4d7f-8e31-df809ca61d38 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.757509] env[68285]: DEBUG oslo_vmware.api [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1110.757509] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525c5a76-28e5-0c6e-0b83-a8edefd97195" [ 1110.757509] env[68285]: _type = "Task" [ 1110.757509] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.761783] env[68285]: DEBUG oslo_vmware.api [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2891999, 'name': PowerOnVM_Task, 'duration_secs': 0.464284} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.764905] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1110.765137] env[68285]: INFO nova.compute.manager [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Took 5.82 seconds to spawn the instance on the hypervisor. [ 1110.765380] env[68285]: DEBUG nova.compute.manager [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1110.766201] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6fc9433-1f80-423b-a9a7-a14d0a388f3f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.777034] env[68285]: DEBUG oslo_vmware.api [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525c5a76-28e5-0c6e-0b83-a8edefd97195, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.821210] env[68285]: DEBUG nova.network.neutron [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Successfully created port: 6c7e74ef-9f39-486f-8e6c-0e8339dac843 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1110.902533] env[68285]: DEBUG nova.compute.manager [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1110.986915] env[68285]: DEBUG oslo_vmware.api [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Task: {'id': task-2892001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202723} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.987252] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1110.987461] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1110.987685] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1110.987893] env[68285]: INFO nova.compute.manager [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1110.988244] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1110.988424] env[68285]: DEBUG nova.compute.manager [-] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1110.988544] env[68285]: DEBUG nova.network.neutron [-] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1111.061280] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892002, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.277047] env[68285]: DEBUG oslo_vmware.api [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525c5a76-28e5-0c6e-0b83-a8edefd97195, 'name': SearchDatastore_Task, 'duration_secs': 0.032287} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.277462] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.295954] env[68285]: INFO nova.compute.manager [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Took 31.57 seconds to build instance. [ 1111.336712] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76440242-9bd5-496e-9e58-146cfa3779a6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.346848] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736163e0-d550-4e59-b629-8b40fb707bfc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.382865] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7af9b01-812e-4a27-8d51-0589f331609e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.393860] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d13bf8-b1c0-4346-a243-a1ebd5ce55c9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.407920] env[68285]: DEBUG nova.compute.provider_tree [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1111.411519] env[68285]: INFO nova.virt.block_device [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Booting with volume df3857a1-3fc9-43c0-a99d-e1a7509342bb at /dev/sda [ 1111.464718] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ffb3291-4742-40ee-95d9-bac3aebae422 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.475098] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51abe61e-d0b4-42ce-8ad6-74bb51e4e89e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.514872] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e69071a2-ce46-4a6e-97bc-e6b9b4ba7880 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.523931] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e35e555a-fe9b-4926-bcee-867eb398fcfc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.560897] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3eb2e39-780b-43e5-918a-c34072bd87ad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.566433] env[68285]: DEBUG nova.compute.manager [req-9fbb65c8-c6f6-4db1-b412-6f6f04859d3e req-a5398a30-92c1-4dfe-b781-baace745cc6c service nova] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Received event network-vif-deleted-c6e96dad-6c94-4213-96ce-d96ca6b80bbe {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1111.566628] env[68285]: INFO nova.compute.manager [req-9fbb65c8-c6f6-4db1-b412-6f6f04859d3e req-a5398a30-92c1-4dfe-b781-baace745cc6c service nova] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Neutron deleted interface c6e96dad-6c94-4213-96ce-d96ca6b80bbe; detaching it from the instance and deleting it from the info cache [ 1111.566798] env[68285]: DEBUG nova.network.neutron [req-9fbb65c8-c6f6-4db1-b412-6f6f04859d3e req-a5398a30-92c1-4dfe-b781-baace745cc6c service nova] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.574528] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892002, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521775} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.575065] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8c299247-896d-4ff1-b73a-22a71ec972fd/8c299247-896d-4ff1-b73a-22a71ec972fd.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1111.575256] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1111.576135] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad7c999-a0d2-4838-a97c-202fb6814c14 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.579017] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-594b19d1-3f93-4a23-b9e3-55d04b3b79d2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.590095] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1111.590095] 
env[68285]: value = "task-2892003" [ 1111.590095] env[68285]: _type = "Task" [ 1111.590095] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.594336] env[68285]: DEBUG nova.virt.block_device [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Updating existing volume attachment record: 64c38572-e86b-49d7-bb37-b4558d73a53e {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1111.607313] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892003, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.797563] env[68285]: DEBUG oslo_concurrency.lockutils [None req-693a2ddd-f5c3-4d31-8915-d9ae5462c187 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "1a040977-b57e-4b67-b259-065b788141de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.089s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.914747] env[68285]: DEBUG nova.scheduler.client.report [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1111.969057] env[68285]: DEBUG nova.network.neutron [-] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.076507] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e82d7ecd-c38e-4bb7-8985-8e25ae216da0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.088679] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b491b7e-52dc-4f90-8835-eb4003bf800a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.108206] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892003, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078772} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.108822] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1112.109778] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6265d6-8b5e-4792-97b9-c223683422fd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.124442] env[68285]: DEBUG nova.compute.manager [req-9fbb65c8-c6f6-4db1-b412-6f6f04859d3e req-a5398a30-92c1-4dfe-b781-baace745cc6c service nova] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Detach interface failed, port_id=c6e96dad-6c94-4213-96ce-d96ca6b80bbe, reason: Instance 8ebbf943-2cef-4c99-a1c4-b1d213fd9884 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1112.150168] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 8c299247-896d-4ff1-b73a-22a71ec972fd/8c299247-896d-4ff1-b73a-22a71ec972fd.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1112.150168] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90563c5d-d0f2-4d8c-adf8-c4d31d14ae95 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.174671] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1112.174671] env[68285]: value = "task-2892004" [ 1112.174671] env[68285]: _type = "Task" [ 1112.174671] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.183743] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892004, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.313620] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.314767] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.314767] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "1a7d1cfc-67a5-4178-9bc2-eb8af5104d11-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.314767] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "1a7d1cfc-67a5-4178-9bc2-eb8af5104d11-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.314767] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "1a7d1cfc-67a5-4178-9bc2-eb8af5104d11-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.317248] env[68285]: INFO nova.compute.manager [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Terminating instance [ 1112.420317] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.420848] env[68285]: DEBUG nova.compute.manager [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1112.424639] env[68285]: DEBUG oslo_concurrency.lockutils [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.647s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.425032] env[68285]: DEBUG nova.objects.instance [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Lazy-loading 'resources' on Instance uuid 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1112.453220] env[68285]: DEBUG nova.compute.manager [req-21403174-1c19-4645-b901-a4f2f2a1bda0 req-264f3965-5e8b-49b0-a9d5-df591a9ae14c service nova] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Received event network-vif-plugged-6c7e74ef-9f39-486f-8e6c-0e8339dac843 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1112.453220] env[68285]: DEBUG oslo_concurrency.lockutils [req-21403174-1c19-4645-b901-a4f2f2a1bda0 req-264f3965-5e8b-49b0-a9d5-df591a9ae14c service nova] Acquiring lock "94652533-8c34-42fa-8d70-4effc307ec71-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.453220] env[68285]: DEBUG oslo_concurrency.lockutils [req-21403174-1c19-4645-b901-a4f2f2a1bda0 req-264f3965-5e8b-49b0-a9d5-df591a9ae14c service nova] Lock "94652533-8c34-42fa-8d70-4effc307ec71-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.453220] env[68285]: DEBUG oslo_concurrency.lockutils [req-21403174-1c19-4645-b901-a4f2f2a1bda0 req-264f3965-5e8b-49b0-a9d5-df591a9ae14c service nova] Lock "94652533-8c34-42fa-8d70-4effc307ec71-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.453220] env[68285]: DEBUG nova.compute.manager [req-21403174-1c19-4645-b901-a4f2f2a1bda0 req-264f3965-5e8b-49b0-a9d5-df591a9ae14c service nova] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] No waiting events found dispatching network-vif-plugged-6c7e74ef-9f39-486f-8e6c-0e8339dac843 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1112.453886] env[68285]: WARNING nova.compute.manager [req-21403174-1c19-4645-b901-a4f2f2a1bda0 req-264f3965-5e8b-49b0-a9d5-df591a9ae14c service nova] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Received unexpected event network-vif-plugged-6c7e74ef-9f39-486f-8e6c-0e8339dac843 for instance with vm_state building and task_state block_device_mapping. [ 1112.471929] env[68285]: INFO nova.compute.manager [-] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Took 1.48 seconds to deallocate network for instance. 
[ 1112.552322] env[68285]: DEBUG nova.network.neutron [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Successfully updated port: 6c7e74ef-9f39-486f-8e6c-0e8339dac843 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1112.682251] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892004, 'name': ReconfigVM_Task, 'duration_secs': 0.289148} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.682570] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 8c299247-896d-4ff1-b73a-22a71ec972fd/8c299247-896d-4ff1-b73a-22a71ec972fd.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1112.683251] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c470bc8-1821-4005-85b1-aef73cb17098 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.690567] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1112.690567] env[68285]: value = "task-2892005" [ 1112.690567] env[68285]: _type = "Task" [ 1112.690567] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.698677] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892005, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.821961] env[68285]: DEBUG nova.compute.manager [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1112.822986] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1112.823112] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2883f323-56ae-4f22-a936-757343f64105 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.834993] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1112.834993] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d394e30-4ecf-4cc9-ac29-f362980bb9e4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.837737] env[68285]: DEBUG oslo_vmware.api [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1112.837737] env[68285]: value = "task-2892006" [ 1112.837737] env[68285]: _type = "Task" [ 1112.837737] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.850297] env[68285]: DEBUG oslo_vmware.api [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892006, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.928305] env[68285]: DEBUG nova.compute.utils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1112.934574] env[68285]: DEBUG nova.compute.manager [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1112.934764] env[68285]: DEBUG nova.network.neutron [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1112.978607] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.983249] env[68285]: DEBUG nova.policy [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c24b9d2248894d52a699df20175b2692', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2fb202eb50a74c558edb6fdb9dfaf077', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1113.059723] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Acquiring lock "refresh_cache-94652533-8c34-42fa-8d70-4effc307ec71" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.060311] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Acquired lock "refresh_cache-94652533-8c34-42fa-8d70-4effc307ec71" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.061428] env[68285]: DEBUG nova.network.neutron [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1113.206586] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892005, 'name': Rename_Task, 'duration_secs': 0.141562} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.210027] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1113.210027] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0365b2ec-a298-4a73-9735-032935a742e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.217786] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1113.217786] env[68285]: value = "task-2892007" [ 1113.217786] env[68285]: _type = "Task" [ 1113.217786] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.226699] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892007, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.353280] env[68285]: DEBUG oslo_vmware.api [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892006, 'name': PowerOffVM_Task, 'duration_secs': 0.229081} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.353583] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1113.353766] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1113.354067] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-960f8774-2a91-4bd2-b9c9-c352acf3c5ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.402027] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f401238-2a03-490e-bdec-2f48dffe79fa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.411639] env[68285]: DEBUG nova.network.neutron [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Successfully created port: 66251dd3-78e6-4e1c-8c80-ad0eac62cd8e {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1113.414778] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f02b8c9-20d5-4898-8ea3-4e9d7c6436c7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.452359] env[68285]: DEBUG nova.compute.manager [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1113.457628] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b611e47-a9f9-4c87-92a5-2e8a31d007d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.460957] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1113.462118] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1113.462118] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Deleting the datastore file [datastore1] 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1113.462317] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63f5ae7d-7a9e-4de2-9be5-46c73fe4bb79 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.470465] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a937e47f-7ccd-40ac-af7f-726c7d91a057 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.475627] env[68285]: DEBUG oslo_vmware.api [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1113.475627] env[68285]: value = "task-2892009" [ 1113.475627] env[68285]: _type = "Task" [ 1113.475627] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.493141] env[68285]: DEBUG nova.compute.provider_tree [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1113.499559] env[68285]: DEBUG oslo_vmware.api [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892009, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.616026] env[68285]: DEBUG nova.network.neutron [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1113.711906] env[68285]: DEBUG nova.compute.manager [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1113.711906] env[68285]: DEBUG nova.virt.hardware [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1113.711906] env[68285]: DEBUG nova.virt.hardware [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1113.711906] env[68285]: DEBUG nova.virt.hardware [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1113.711906] env[68285]: DEBUG nova.virt.hardware [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1113.711906] env[68285]: DEBUG nova.virt.hardware [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1113.711906] env[68285]: DEBUG nova.virt.hardware [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1113.712313] env[68285]: DEBUG nova.virt.hardware [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1113.712313] env[68285]: DEBUG nova.virt.hardware [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 
tempest-ServersTestBootFromVolume-279649200-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1113.712535] env[68285]: DEBUG nova.virt.hardware [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1113.712633] env[68285]: DEBUG nova.virt.hardware [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1113.712828] env[68285]: DEBUG nova.virt.hardware [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1113.713834] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b6e361-ef96-41db-856f-06318ef06705 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.726654] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072b7f63-e355-47cc-8efb-f227ea80e323 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.745339] env[68285]: DEBUG oslo_vmware.api [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892007, 'name': PowerOnVM_Task, 'duration_secs': 0.495058} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.745840] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1113.746058] env[68285]: INFO nova.compute.manager [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Took 6.37 seconds to spawn the instance on the hypervisor. 
[ 1113.746244] env[68285]: DEBUG nova.compute.manager [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1113.747032] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ecf1d5-4113-4cc5-b841-d67c39e5ae18 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.845811] env[68285]: DEBUG nova.network.neutron [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Updating instance_info_cache with network_info: [{"id": "6c7e74ef-9f39-486f-8e6c-0e8339dac843", "address": "fa:16:3e:76:47:74", "network": {"id": "57ce37a6-69c3-4208-b3d8-e3ac9775fedc", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-549400015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e2e033b9e69480c92c3010c4899a04a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c7e74ef-9f", "ovs_interfaceid": "6c7e74ef-9f39-486f-8e6c-0e8339dac843", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.991357] env[68285]: DEBUG oslo_vmware.api [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892009, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.262495} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.991546] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1113.991725] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1113.991894] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1113.992073] env[68285]: INFO nova.compute.manager [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1113.992315] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1113.992514] env[68285]: DEBUG nova.compute.manager [-] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1113.992635] env[68285]: DEBUG nova.network.neutron [-] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1113.997144] env[68285]: DEBUG nova.scheduler.client.report [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1114.272821] env[68285]: INFO nova.compute.manager [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Took 32.70 seconds to build instance. 
[ 1114.355331] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Releasing lock "refresh_cache-94652533-8c34-42fa-8d70-4effc307ec71" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.356692] env[68285]: DEBUG nova.compute.manager [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Instance network_info: |[{"id": "6c7e74ef-9f39-486f-8e6c-0e8339dac843", "address": "fa:16:3e:76:47:74", "network": {"id": "57ce37a6-69c3-4208-b3d8-e3ac9775fedc", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-549400015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e2e033b9e69480c92c3010c4899a04a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c7e74ef-9f", "ovs_interfaceid": "6c7e74ef-9f39-486f-8e6c-0e8339dac843", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1114.356692] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:47:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '140f4558-c11e-4af4-ab36-234e2d2f80a4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c7e74ef-9f39-486f-8e6c-0e8339dac843', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1114.367959] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Creating folder: Project (4e2e033b9e69480c92c3010c4899a04a). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1114.367959] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de4efafa-5b3f-4c75-a1af-6ed4b8bd210f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.380049] env[68285]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 1114.380049] env[68285]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68285) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1114.383219] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Folder already exists: Project (4e2e033b9e69480c92c3010c4899a04a). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1114.383219] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Creating folder: Instances. Parent ref: group-v580970. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1114.383219] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c1437c8-4910-4d57-b98a-d78423ec9e81 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.389902] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Created folder: Instances in parent group-v580970. [ 1114.390159] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1114.390360] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1114.390560] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbb9156d-5630-4999-8da8-b584c90b52fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.411275] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1114.411275] env[68285]: value = "task-2892012" [ 1114.411275] env[68285]: _type = "Task" [ 1114.411275] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.420793] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892012, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.469336] env[68285]: DEBUG nova.compute.manager [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1114.507477] env[68285]: DEBUG nova.virt.hardware [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1114.508237] env[68285]: DEBUG nova.virt.hardware [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1114.508237] env[68285]: DEBUG nova.virt.hardware [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1114.508237] env[68285]: DEBUG nova.virt.hardware [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1114.508237] env[68285]: DEBUG nova.virt.hardware [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1114.508437] env[68285]: DEBUG nova.virt.hardware [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1114.508569] env[68285]: DEBUG nova.virt.hardware [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1114.508742] env[68285]: DEBUG nova.virt.hardware [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1114.508886] env[68285]: DEBUG 
nova.virt.hardware [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1114.509174] env[68285]: DEBUG nova.virt.hardware [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1114.509354] env[68285]: DEBUG nova.virt.hardware [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1114.510195] env[68285]: DEBUG oslo_concurrency.lockutils [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.086s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.514184] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c51194-af0e-4c1a-bd32-0843a3732325 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.515760] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.383s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.517243] env[68285]: INFO nova.compute.claims [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1114.525548] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5ae55c-7792-400c-bb79-e20e56a94126 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.548604] env[68285]: INFO nova.scheduler.client.report [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Deleted allocations for instance 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81 [ 1114.596016] env[68285]: DEBUG nova.compute.manager [req-743004b7-46ab-4d89-95f1-be57021b030c req-1a790323-f748-42fb-88b0-ba780417baa9 service nova] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Received event network-changed-6c7e74ef-9f39-486f-8e6c-0e8339dac843 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1114.596237] env[68285]: DEBUG nova.compute.manager [req-743004b7-46ab-4d89-95f1-be57021b030c req-1a790323-f748-42fb-88b0-ba780417baa9 service nova] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] 
Refreshing instance network info cache due to event network-changed-6c7e74ef-9f39-486f-8e6c-0e8339dac843. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1114.596473] env[68285]: DEBUG oslo_concurrency.lockutils [req-743004b7-46ab-4d89-95f1-be57021b030c req-1a790323-f748-42fb-88b0-ba780417baa9 service nova] Acquiring lock "refresh_cache-94652533-8c34-42fa-8d70-4effc307ec71" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.596620] env[68285]: DEBUG oslo_concurrency.lockutils [req-743004b7-46ab-4d89-95f1-be57021b030c req-1a790323-f748-42fb-88b0-ba780417baa9 service nova] Acquired lock "refresh_cache-94652533-8c34-42fa-8d70-4effc307ec71" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.596777] env[68285]: DEBUG nova.network.neutron [req-743004b7-46ab-4d89-95f1-be57021b030c req-1a790323-f748-42fb-88b0-ba780417baa9 service nova] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Refreshing network info cache for port 6c7e74ef-9f39-486f-8e6c-0e8339dac843 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1114.644472] env[68285]: DEBUG nova.compute.manager [req-5625f5a1-3a69-42dc-b44e-f7ca66e612f3 req-610eb7b9-b63c-44f6-b439-e7c8ecd44782 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Received event network-vif-deleted-70ff5ae8-d9d3-4840-abcd-733345a4c4af {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1114.644886] env[68285]: INFO nova.compute.manager [req-5625f5a1-3a69-42dc-b44e-f7ca66e612f3 req-610eb7b9-b63c-44f6-b439-e7c8ecd44782 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Neutron deleted interface 70ff5ae8-d9d3-4840-abcd-733345a4c4af; detaching it from the instance and deleting it from the info cache [ 1114.644886] env[68285]: DEBUG nova.network.neutron [req-5625f5a1-3a69-42dc-b44e-f7ca66e612f3 req-610eb7b9-b63c-44f6-b439-e7c8ecd44782 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.775114] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2e7a96ce-405e-42ea-ba02-780b84bee940 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "8c299247-896d-4ff1-b73a-22a71ec972fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.915s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.922648] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892012, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.924938] env[68285]: INFO nova.compute.manager [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Rebuilding instance [ 1114.969880] env[68285]: DEBUG nova.compute.manager [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1114.970768] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61922b7-7262-48d2-ba83-510f57e1a670 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.050474] env[68285]: DEBUG nova.network.neutron [-] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.060512] env[68285]: DEBUG oslo_concurrency.lockutils [None req-66bfea37-b3a2-421b-a344-8d331a47135d tempest-InstanceActionsTestJSON-2107229964 tempest-InstanceActionsTestJSON-2107229964-project-member] Lock "2e5a2839-3cdf-436d-89eb-5d6f83c3bf81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.854s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.147838] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c9dd3c34-8fe9-49e5-b7ef-9fefbca50317 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.162053] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ff011a-74e1-4a20-8edc-9851673ce740 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.210139] env[68285]: DEBUG nova.compute.manager [req-5625f5a1-3a69-42dc-b44e-f7ca66e612f3 req-610eb7b9-b63c-44f6-b439-e7c8ecd44782 service nova] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Detach interface failed, port_id=70ff5ae8-d9d3-4840-abcd-733345a4c4af, reason: Instance 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1115.397469] env[68285]: DEBUG nova.network.neutron [req-743004b7-46ab-4d89-95f1-be57021b030c req-1a790323-f748-42fb-88b0-ba780417baa9 service nova] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Updated VIF entry in instance network info cache for port 6c7e74ef-9f39-486f-8e6c-0e8339dac843. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1115.397859] env[68285]: DEBUG nova.network.neutron [req-743004b7-46ab-4d89-95f1-be57021b030c req-1a790323-f748-42fb-88b0-ba780417baa9 service nova] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Updating instance_info_cache with network_info: [{"id": "6c7e74ef-9f39-486f-8e6c-0e8339dac843", "address": "fa:16:3e:76:47:74", "network": {"id": "57ce37a6-69c3-4208-b3d8-e3ac9775fedc", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-549400015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e2e033b9e69480c92c3010c4899a04a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c7e74ef-9f", "ovs_interfaceid": "6c7e74ef-9f39-486f-8e6c-0e8339dac843", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.423393] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892012, 'name': CreateVM_Task, 'duration_secs': 0.619545} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.423578] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1115.424523] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'boot_index': 0, 'disk_bus': None, 'guest_format': None, 'device_type': None, 'attachment_id': '64c38572-e86b-49d7-bb37-b4558d73a53e', 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580976', 'volume_id': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'name': 'volume-df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '94652533-8c34-42fa-8d70-4effc307ec71', 'attached_at': '', 'detached_at': '', 'volume_id': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'serial': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb'}, 'volume_type': None}], 'swap': None} {{(pid=68285) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1115.424523] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 
94652533-8c34-42fa-8d70-4effc307ec71] Root volume attach. Driver type: vmdk {{(pid=68285) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1115.425571] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2ec493-21e5-44b3-aedb-56206b0684f1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.435179] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f66f48-adc0-44d0-b7f9-55120bcb3f89 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.443039] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4128f6b4-f3fb-49b5-85e5-a213aad3fff1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.450526] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-eebd273d-f239-4c42-8e5c-973c1e5f112c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.457797] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Waiting for the task: (returnval){ [ 1115.457797] env[68285]: value = "task-2892013" [ 1115.457797] env[68285]: _type = "Task" [ 1115.457797] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.466901] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892013, 'name': RelocateVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.552846] env[68285]: INFO nova.compute.manager [-] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Took 1.56 seconds to deallocate network for instance. 
[ 1115.626994] env[68285]: DEBUG nova.network.neutron [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Successfully updated port: 66251dd3-78e6-4e1c-8c80-ad0eac62cd8e {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1115.897462] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfacbd29-51d0-4b00-b3b2-5567e0d8e7a1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.905414] env[68285]: DEBUG oslo_concurrency.lockutils [req-743004b7-46ab-4d89-95f1-be57021b030c req-1a790323-f748-42fb-88b0-ba780417baa9 service nova] Releasing lock "refresh_cache-94652533-8c34-42fa-8d70-4effc307ec71" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.907119] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c26cb8-4cbd-4a46-bf9d-ac03185ff916 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.939627] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90d3a25-fff8-4cf8-b03f-abac71c39321 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.947937] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bed67bc-113f-4d9a-ab35-a6d503993073 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.962714] env[68285]: DEBUG nova.compute.provider_tree [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1115.973246] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892013, 'name': RelocateVM_Task, 'duration_secs': 0.435171} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.973533] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Volume attach. 
Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1115.973736] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580976', 'volume_id': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'name': 'volume-df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '94652533-8c34-42fa-8d70-4effc307ec71', 'attached_at': '', 'detached_at': '', 'volume_id': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'serial': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1115.975276] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ef4a24-bde4-4b4b-9f70-02f7d0699627 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.996147] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1115.996801] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8052b7e3-2b24-446c-be53-34f7ebf0f7c4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.998818] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337ae405-0fb2-437d-8e72-7ad025fe435d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.021292] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] volume-df3857a1-3fc9-43c0-a99d-e1a7509342bb/volume-df3857a1-3fc9-43c0-a99d-e1a7509342bb.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1116.022529] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3de0dfd-0a1a-4c7f-ad54-5e2de37078ad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.036942] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1116.036942] env[68285]: value = "task-2892014" [ 1116.036942] env[68285]: _type = "Task" [ 1116.036942] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.042251] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Waiting for the task: (returnval){ [ 1116.042251] env[68285]: value = "task-2892015" [ 1116.042251] env[68285]: _type = "Task" [ 1116.042251] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.053981] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892015, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.070317] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.132538] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "refresh_cache-ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.132682] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquired lock "refresh_cache-ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.132839] env[68285]: DEBUG nova.network.neutron [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1116.469549] env[68285]: DEBUG nova.scheduler.client.report [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1116.547175] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892014, 'name': PowerOffVM_Task, 'duration_secs': 0.158267} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.550623] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1116.550901] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1116.551663] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6eec19b-4948-4464-a49e-8f9dbda17c1a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.559273] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892015, 'name': ReconfigVM_Task, 'duration_secs': 0.269067} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.561519] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Reconfigured VM instance instance-00000050 to attach disk [datastore1] volume-df3857a1-3fc9-43c0-a99d-e1a7509342bb/volume-df3857a1-3fc9-43c0-a99d-e1a7509342bb.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1116.567051] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1116.567051] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a57ed12-b8d2-4bf4-a3af-da54fa4bdacf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.577467] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0eb05d1f-763e-4bf9-9fec-649f2c83ba58 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.585156] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Waiting for the task: (returnval){ [ 1116.585156] env[68285]: value = "task-2892017" [ 1116.585156] env[68285]: _type = "Task" [ 1116.585156] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.595920] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892017, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.607057] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1116.607057] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1116.607057] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Deleting the datastore file [datastore1] 8c299247-896d-4ff1-b73a-22a71ec972fd {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1116.607057] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f79945c-4f08-481f-b5de-4fb1a32e9c26 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.612549] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1116.612549] env[68285]: value = "task-2892018" [ 1116.612549] env[68285]: _type = "Task" [ 1116.612549] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.621556] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892018, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.631464] env[68285]: DEBUG nova.compute.manager [req-02d0cfa5-7d8e-4897-9717-166bc996951a req-76da6abf-6a5d-4d55-86b3-3028014613bd service nova] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Received event network-vif-plugged-66251dd3-78e6-4e1c-8c80-ad0eac62cd8e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1116.631701] env[68285]: DEBUG oslo_concurrency.lockutils [req-02d0cfa5-7d8e-4897-9717-166bc996951a req-76da6abf-6a5d-4d55-86b3-3028014613bd service nova] Acquiring lock "ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.632098] env[68285]: DEBUG oslo_concurrency.lockutils [req-02d0cfa5-7d8e-4897-9717-166bc996951a req-76da6abf-6a5d-4d55-86b3-3028014613bd service nova] Lock "ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.632316] env[68285]: DEBUG oslo_concurrency.lockutils [req-02d0cfa5-7d8e-4897-9717-166bc996951a req-76da6abf-6a5d-4d55-86b3-3028014613bd service nova] Lock "ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.632506] env[68285]: DEBUG nova.compute.manager [req-02d0cfa5-7d8e-4897-9717-166bc996951a req-76da6abf-6a5d-4d55-86b3-3028014613bd service nova] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] No waiting events found dispatching network-vif-plugged-66251dd3-78e6-4e1c-8c80-ad0eac62cd8e {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1116.632692] env[68285]: WARNING nova.compute.manager [req-02d0cfa5-7d8e-4897-9717-166bc996951a req-76da6abf-6a5d-4d55-86b3-3028014613bd service nova] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Received unexpected event network-vif-plugged-66251dd3-78e6-4e1c-8c80-ad0eac62cd8e for instance with vm_state building and task_state spawning. [ 1116.632857] env[68285]: DEBUG nova.compute.manager [req-02d0cfa5-7d8e-4897-9717-166bc996951a req-76da6abf-6a5d-4d55-86b3-3028014613bd service nova] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Received event network-changed-66251dd3-78e6-4e1c-8c80-ad0eac62cd8e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1116.633281] env[68285]: DEBUG nova.compute.manager [req-02d0cfa5-7d8e-4897-9717-166bc996951a req-76da6abf-6a5d-4d55-86b3-3028014613bd service nova] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Refreshing instance network info cache due to event network-changed-66251dd3-78e6-4e1c-8c80-ad0eac62cd8e. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1116.633502] env[68285]: DEBUG oslo_concurrency.lockutils [req-02d0cfa5-7d8e-4897-9717-166bc996951a req-76da6abf-6a5d-4d55-86b3-3028014613bd service nova] Acquiring lock "refresh_cache-ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.665194] env[68285]: DEBUG nova.network.neutron [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1116.885175] env[68285]: DEBUG nova.network.neutron [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Updating instance_info_cache with network_info: [{"id": "66251dd3-78e6-4e1c-8c80-ad0eac62cd8e", "address": "fa:16:3e:48:46:51", "network": {"id": "19fe9f45-cb71-4a4f-8a94-0020f8d0e8a7", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-693820438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fb202eb50a74c558edb6fdb9dfaf077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66251dd3-78", "ovs_interfaceid": "66251dd3-78e6-4e1c-8c80-ad0eac62cd8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.975106] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.975731] env[68285]: DEBUG nova.compute.manager [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1116.979111] env[68285]: DEBUG oslo_concurrency.lockutils [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.282s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.979411] env[68285]: DEBUG nova.objects.instance [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1117.095087] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892017, 'name': ReconfigVM_Task, 'duration_secs': 0.127427} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.095400] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580976', 'volume_id': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'name': 'volume-df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '94652533-8c34-42fa-8d70-4effc307ec71', 'attached_at': '', 'detached_at': '', 'volume_id': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'serial': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1117.095922] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a58b8448-87a7-4197-b4ce-162546e03924 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.102431] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Waiting for the task: (returnval){ [ 1117.102431] env[68285]: value = "task-2892019" [ 1117.102431] env[68285]: _type = "Task" [ 1117.102431] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.110775] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892019, 'name': Rename_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.120454] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892018, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108296} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.120517] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1117.121573] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1117.121573] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1117.391228] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Releasing lock "refresh_cache-ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.392028] env[68285]: DEBUG nova.compute.manager [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Instance network_info: |[{"id": "66251dd3-78e6-4e1c-8c80-ad0eac62cd8e", "address": "fa:16:3e:48:46:51", "network": {"id": "19fe9f45-cb71-4a4f-8a94-0020f8d0e8a7", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-693820438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fb202eb50a74c558edb6fdb9dfaf077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66251dd3-78", "ovs_interfaceid": "66251dd3-78e6-4e1c-8c80-ad0eac62cd8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1117.392028] env[68285]: DEBUG oslo_concurrency.lockutils [req-02d0cfa5-7d8e-4897-9717-166bc996951a 
req-76da6abf-6a5d-4d55-86b3-3028014613bd service nova] Acquired lock "refresh_cache-ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.392247] env[68285]: DEBUG nova.network.neutron [req-02d0cfa5-7d8e-4897-9717-166bc996951a req-76da6abf-6a5d-4d55-86b3-3028014613bd service nova] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Refreshing network info cache for port 66251dd3-78e6-4e1c-8c80-ad0eac62cd8e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1117.393833] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:46:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66251dd3-78e6-4e1c-8c80-ad0eac62cd8e', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1117.401333] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1117.404516] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1117.405357] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-06e01862-66ea-4aeb-b068-a3673c5422bb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.425987] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1117.425987] env[68285]: value = "task-2892020" [ 1117.425987] env[68285]: _type = "Task" [ 1117.425987] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.433992] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892020, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.488955] env[68285]: DEBUG nova.compute.utils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1117.491712] env[68285]: DEBUG nova.compute.manager [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1117.491712] env[68285]: DEBUG nova.network.neutron [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1117.540402] env[68285]: DEBUG nova.policy [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20f736434c6b457c9ce87771ace6a728', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5fd7bc7649b647939584cc01c1f3b5d0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1117.612515] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892019, 'name': Rename_Task, 'duration_secs': 0.122405} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.612806] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1117.613060] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b62063f-2bf8-4013-8703-e4624e4bb1c4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.619338] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Waiting for the task: (returnval){ [ 1117.619338] env[68285]: value = "task-2892021" [ 1117.619338] env[68285]: _type = "Task" [ 1117.619338] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.639082] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892021, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.851642] env[68285]: DEBUG nova.network.neutron [req-02d0cfa5-7d8e-4897-9717-166bc996951a req-76da6abf-6a5d-4d55-86b3-3028014613bd service nova] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Updated VIF entry in instance network info cache for port 66251dd3-78e6-4e1c-8c80-ad0eac62cd8e. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1117.851901] env[68285]: DEBUG nova.network.neutron [req-02d0cfa5-7d8e-4897-9717-166bc996951a req-76da6abf-6a5d-4d55-86b3-3028014613bd service nova] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Updating instance_info_cache with network_info: [{"id": "66251dd3-78e6-4e1c-8c80-ad0eac62cd8e", "address": "fa:16:3e:48:46:51", "network": {"id": "19fe9f45-cb71-4a4f-8a94-0020f8d0e8a7", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-693820438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fb202eb50a74c558edb6fdb9dfaf077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66251dd3-78", "ovs_interfaceid": "66251dd3-78e6-4e1c-8c80-ad0eac62cd8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.878271] env[68285]: DEBUG nova.network.neutron [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Successfully created port: 569e9535-6252-4998-9567-e57ffca9a73b {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1117.937650] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892020, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.992714] env[68285]: DEBUG oslo_concurrency.lockutils [None req-351d7dae-80a2-48bd-8497-7a3dfaa5136e tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.993287] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.613s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.993492] env[68285]: DEBUG nova.objects.instance [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lazy-loading 'resources' on Instance uuid ec89a2a4-3bfc-45c5-b7f2-239b52995d6b {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1117.996489] env[68285]: DEBUG nova.compute.manager [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1118.132754] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892021, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.170479] env[68285]: DEBUG nova.virt.hardware [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1118.170719] env[68285]: DEBUG nova.virt.hardware [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1118.170902] env[68285]: DEBUG nova.virt.hardware [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1118.171111] env[68285]: DEBUG nova.virt.hardware [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1118.171261] env[68285]: DEBUG nova.virt.hardware [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1118.171409] env[68285]: DEBUG nova.virt.hardware [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1118.171618] env[68285]: DEBUG nova.virt.hardware [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1118.171781] env[68285]: DEBUG nova.virt.hardware [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1118.172213] env[68285]: DEBUG nova.virt.hardware [None 
req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1118.172213] env[68285]: DEBUG nova.virt.hardware [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1118.172333] env[68285]: DEBUG nova.virt.hardware [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1118.173565] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b354ff05-0d59-491e-9ab4-139a15457abd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.182011] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03774f7-020d-4292-8cb2-f101070387d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.196154] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1118.202692] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1118.203098] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1118.203257] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-60178c82-3b8e-4e7d-99ce-510fc502522a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.227837] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1118.227837] env[68285]: value = "task-2892022" [ 1118.227837] env[68285]: _type = "Task" [ 1118.227837] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.235423] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892022, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.355418] env[68285]: DEBUG oslo_concurrency.lockutils [req-02d0cfa5-7d8e-4897-9717-166bc996951a req-76da6abf-6a5d-4d55-86b3-3028014613bd service nova] Releasing lock "refresh_cache-ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.437698] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892020, 'name': CreateVM_Task, 'duration_secs': 0.583676} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.437901] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1118.438583] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.438772] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.439099] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1118.439360] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5c2a4b5-c1ad-4f5e-a3c9-af9850a4658a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.443905] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1118.443905] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524ab401-64c5-9e08-c512-04dbd712bea5" [ 1118.443905] env[68285]: _type = "Task" [ 1118.443905] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.452457] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524ab401-64c5-9e08-c512-04dbd712bea5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.632440] env[68285]: DEBUG oslo_vmware.api [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892021, 'name': PowerOnVM_Task, 'duration_secs': 0.73096} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.635391] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1118.635658] env[68285]: INFO nova.compute.manager [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Took 4.93 seconds to spawn the instance on the hypervisor. [ 1118.635878] env[68285]: DEBUG nova.compute.manager [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1118.637773] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65914d92-d040-4ba4-8e31-3303c7d41df8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.741016] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892022, 'name': CreateVM_Task, 'duration_secs': 0.407073} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.741204] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1118.741590] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.888694] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59bfa53-d1c5-4503-9a60-ad78901922ce {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.896151] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6790656-d9f5-4bd0-b5d5-5f9e21db0717 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.928896] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6728c9c8-56b6-455f-8c01-d4e60a34ee0a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.936965] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75d7ff0-97de-445c-9e18-d3d91028bce9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.951737] env[68285]: DEBUG nova.compute.provider_tree [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1118.961654] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524ab401-64c5-9e08-c512-04dbd712bea5, 'name': SearchDatastore_Task, 'duration_secs': 0.011157} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.962560] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.962798] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1118.963043] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.963193] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.963374] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1118.963703] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.964282] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1118.964485] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48431012-01d8-45ed-926e-13b1e77350b1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.966637] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5338922f-84f8-4c21-834f-928ef2ff3903 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.972596] env[68285]: DEBUG oslo_vmware.api [None 
req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1118.972596] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]520c086c-7231-fe41-b6e8-558a13465b0e" [ 1118.972596] env[68285]: _type = "Task" [ 1118.972596] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.976817] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1118.976999] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1118.978712] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c3c3331-6206-4556-b2d0-950783819017 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.983415] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520c086c-7231-fe41-b6e8-558a13465b0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.986016] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1118.986016] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f3a117-678d-f70b-296a-5510d3b1304d" [ 1118.986016] env[68285]: _type = "Task" [ 1118.986016] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.993281] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f3a117-678d-f70b-296a-5510d3b1304d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.007066] env[68285]: DEBUG nova.compute.manager [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1119.041297] env[68285]: DEBUG nova.virt.hardware [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1119.041573] env[68285]: DEBUG nova.virt.hardware [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1119.041730] env[68285]: DEBUG nova.virt.hardware [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1119.041919] env[68285]: DEBUG nova.virt.hardware [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1119.042083] env[68285]: DEBUG nova.virt.hardware [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1119.042238] env[68285]: DEBUG nova.virt.hardware [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1119.042446] env[68285]: DEBUG nova.virt.hardware [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1119.042604] env[68285]: DEBUG nova.virt.hardware [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1119.042769] env[68285]: DEBUG nova.virt.hardware [None 
req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1119.042933] env[68285]: DEBUG nova.virt.hardware [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1119.043121] env[68285]: DEBUG nova.virt.hardware [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1119.044365] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ed88ff-6230-431a-a44e-33d2a3613081 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.051743] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b38a2e1-ea70-4a3f-a775-d2f2b9d9fe39 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.160242] env[68285]: INFO nova.compute.manager [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Took 32.42 seconds to build instance. 
[ 1119.351183] env[68285]: DEBUG nova.compute.manager [req-56aca78d-4012-42d3-8440-c8895f39c357 req-a01422ae-daa2-4fc4-8f1c-e42c6bdf5db6 service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Received event network-vif-plugged-569e9535-6252-4998-9567-e57ffca9a73b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1119.351317] env[68285]: DEBUG oslo_concurrency.lockutils [req-56aca78d-4012-42d3-8440-c8895f39c357 req-a01422ae-daa2-4fc4-8f1c-e42c6bdf5db6 service nova] Acquiring lock "7790f1e6-c73f-40d6-97af-00e9c518a09c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.351526] env[68285]: DEBUG oslo_concurrency.lockutils [req-56aca78d-4012-42d3-8440-c8895f39c357 req-a01422ae-daa2-4fc4-8f1c-e42c6bdf5db6 service nova] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.351852] env[68285]: DEBUG oslo_concurrency.lockutils [req-56aca78d-4012-42d3-8440-c8895f39c357 req-a01422ae-daa2-4fc4-8f1c-e42c6bdf5db6 service nova] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.351852] env[68285]: DEBUG nova.compute.manager [req-56aca78d-4012-42d3-8440-c8895f39c357 req-a01422ae-daa2-4fc4-8f1c-e42c6bdf5db6 service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] No waiting events found dispatching network-vif-plugged-569e9535-6252-4998-9567-e57ffca9a73b {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1119.352020] env[68285]: WARNING nova.compute.manager [req-56aca78d-4012-42d3-8440-c8895f39c357 req-a01422ae-daa2-4fc4-8f1c-e42c6bdf5db6 service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Received unexpected event network-vif-plugged-569e9535-6252-4998-9567-e57ffca9a73b for instance with vm_state building and task_state spawning. 
[ 1119.394642] env[68285]: DEBUG nova.network.neutron [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Successfully updated port: 569e9535-6252-4998-9567-e57ffca9a73b {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1119.458098] env[68285]: DEBUG nova.scheduler.client.report [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1119.483704] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520c086c-7231-fe41-b6e8-558a13465b0e, 'name': SearchDatastore_Task, 'duration_secs': 0.016828} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.484138] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.484457] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1119.484733] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.495476] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f3a117-678d-f70b-296a-5510d3b1304d, 'name': SearchDatastore_Task, 'duration_secs': 0.033755} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.496268] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-040000c8-3405-46b3-88e0-0cd51487377f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.501098] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1119.501098] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f557b4-91e8-2825-44d3-6d7c6aeb1a91" [ 1119.501098] env[68285]: _type = "Task" [ 1119.501098] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.508674] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f557b4-91e8-2825-44d3-6d7c6aeb1a91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.662725] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b8dd85a7-f375-4ff9-ad79-b24cb934d42f tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Lock "94652533-8c34-42fa-8d70-4effc307ec71" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.934s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.896809] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "refresh_cache-7790f1e6-c73f-40d6-97af-00e9c518a09c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.896809] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquired lock "refresh_cache-7790f1e6-c73f-40d6-97af-00e9c518a09c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1119.896809] env[68285]: DEBUG nova.network.neutron [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1119.963463] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.970s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.967649] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 
tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.086s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.971557] env[68285]: INFO nova.compute.claims [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1120.017088] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f557b4-91e8-2825-44d3-6d7c6aeb1a91, 'name': SearchDatastore_Task, 'duration_secs': 0.030372} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.018365] env[68285]: INFO nova.scheduler.client.report [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Deleted allocations for instance ec89a2a4-3bfc-45c5-b7f2-239b52995d6b [ 1120.020145] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.020145] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf/ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1120.020145] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1120.020145] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1120.020369] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4ef2ba1-f6ff-4c90-bef9-27c229bc49de {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.023195] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b7479f3-69dd-4f79-aa62-524932dedb46 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.038534] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1120.038685] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1120.040685] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1120.040685] env[68285]: value = "task-2892023" [ 1120.040685] env[68285]: _type = "Task" [ 1120.040685] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.043408] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21fdbfcc-0ce9-4ee2-be71-36e493179b38 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.053620] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1120.053620] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d920f2-6ab4-a335-64e0-5a668597bd0e" [ 1120.053620] env[68285]: _type = "Task" [ 1120.053620] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.064351] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892023, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.074083] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d920f2-6ab4-a335-64e0-5a668597bd0e, 'name': SearchDatastore_Task, 'duration_secs': 0.011546} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.075632] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd87c949-f900-4765-b8e2-f9369d2d5569 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.082255] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1120.082255] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5225bb82-56a0-4fcf-1b08-2c00251e824f" [ 1120.082255] env[68285]: _type = "Task" [ 1120.082255] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.093333] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5225bb82-56a0-4fcf-1b08-2c00251e824f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.442730] env[68285]: DEBUG nova.network.neutron [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1120.533312] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bc075c13-2164-4352-b8e3-21875be9239d tempest-ServersAdminTestJSON-707772762 tempest-ServersAdminTestJSON-707772762-project-member] Lock "ec89a2a4-3bfc-45c5-b7f2-239b52995d6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.194s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.557037] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892023, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.594815] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5225bb82-56a0-4fcf-1b08-2c00251e824f, 'name': SearchDatastore_Task, 'duration_secs': 0.012111} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.599515] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.599515] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8c299247-896d-4ff1-b73a-22a71ec972fd/8c299247-896d-4ff1-b73a-22a71ec972fd.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1120.599515] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1208c105-d0cd-4453-bf8c-65ec26db1262 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.603599] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1120.603599] env[68285]: value = "task-2892024" [ 1120.603599] env[68285]: _type = "Task" [ 1120.603599] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.613366] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892024, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.616366] env[68285]: DEBUG nova.network.neutron [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Updating instance_info_cache with network_info: [{"id": "569e9535-6252-4998-9567-e57ffca9a73b", "address": "fa:16:3e:aa:36:4e", "network": {"id": "43282131-363f-42f6-b208-74cfe0d8a7c2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-166704782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fd7bc7649b647939584cc01c1f3b5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap569e9535-62", "ovs_interfaceid": "569e9535-6252-4998-9567-e57ffca9a73b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.060429] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892023, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.747287} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.060429] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf/ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1121.060429] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1121.060913] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73b9f4e5-9421-40ef-a581-579b05a30b6b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.070539] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1121.070539] env[68285]: value = "task-2892025" [ 1121.070539] env[68285]: _type = "Task" [ 1121.070539] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.079619] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892025, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.115708] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892024, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.120086] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Releasing lock "refresh_cache-7790f1e6-c73f-40d6-97af-00e9c518a09c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.120363] env[68285]: DEBUG nova.compute.manager [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Instance network_info: |[{"id": "569e9535-6252-4998-9567-e57ffca9a73b", "address": "fa:16:3e:aa:36:4e", "network": {"id": "43282131-363f-42f6-b208-74cfe0d8a7c2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-166704782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fd7bc7649b647939584cc01c1f3b5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap569e9535-62", "ovs_interfaceid": "569e9535-6252-4998-9567-e57ffca9a73b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1121.121209] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:36:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f499bc9-78da-46c1-9274-19edf26d31cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '569e9535-6252-4998-9567-e57ffca9a73b', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1121.134367] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1121.140257] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1121.140257] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0bfcef4f-142e-4d53-a362-2ce7d2b44d6c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.174038] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1121.174038] env[68285]: value = "task-2892026" [ 1121.174038] env[68285]: _type = "Task" [ 1121.174038] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.188735] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892026, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.394114] env[68285]: DEBUG nova.compute.manager [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Received event network-changed-569e9535-6252-4998-9567-e57ffca9a73b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1121.394366] env[68285]: DEBUG nova.compute.manager [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Refreshing instance network info cache due to event network-changed-569e9535-6252-4998-9567-e57ffca9a73b. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1121.394624] env[68285]: DEBUG oslo_concurrency.lockutils [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] Acquiring lock "refresh_cache-7790f1e6-c73f-40d6-97af-00e9c518a09c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.395397] env[68285]: DEBUG oslo_concurrency.lockutils [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] Acquired lock "refresh_cache-7790f1e6-c73f-40d6-97af-00e9c518a09c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.395397] env[68285]: DEBUG nova.network.neutron [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Refreshing network info cache for port 569e9535-6252-4998-9567-e57ffca9a73b {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1121.422689] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c8061a-baee-473b-a075-b08c0a7f4bf1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.431925] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dddb2ea5-5d4e-4110-8523-cbdd3cb941e5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.480923] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978b0e73-d475-40f4-9f1a-bc0e1939d4ef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.490455] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9ebcca-19ec-4f8e-9721-9b1888ebf71e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.506345] env[68285]: DEBUG nova.compute.provider_tree [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1121.581081] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892025, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.153267} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.581381] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1121.582527] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a93d6b-4a51-4ab8-85cc-8a93fac58d75 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.604480] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf/ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1121.604795] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d401c74-0100-4f3a-a9ff-ba95c2c09aa5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.627900] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892024, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.588862} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.629153] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8c299247-896d-4ff1-b73a-22a71ec972fd/8c299247-896d-4ff1-b73a-22a71ec972fd.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1121.629383] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1121.629690] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1121.629690] env[68285]: value = "task-2892027" [ 1121.629690] env[68285]: _type = "Task" [ 1121.629690] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.629911] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b1b450c6-e80a-4c9e-8507-a21455dbb754 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.640254] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892027, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.641669] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1121.641669] env[68285]: value = "task-2892028" [ 1121.641669] env[68285]: _type = "Task" [ 1121.641669] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.649161] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892028, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.685607] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892026, 'name': CreateVM_Task, 'duration_secs': 0.375106} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.685836] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1121.686546] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.686714] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.687043] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1121.687310] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-323095bf-d27b-4ed2-8ef2-35e3b5dd0c9e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.692173] 
env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1121.692173] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]528b5102-22cc-32f1-dd72-c7c333c6cee1" [ 1121.692173] env[68285]: _type = "Task" [ 1121.692173] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.699648] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528b5102-22cc-32f1-dd72-c7c333c6cee1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.009607] env[68285]: DEBUG nova.scheduler.client.report [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1122.134017] env[68285]: DEBUG nova.network.neutron [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Updated VIF entry in instance network info cache for port 569e9535-6252-4998-9567-e57ffca9a73b. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1122.134017] env[68285]: DEBUG nova.network.neutron [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Updating instance_info_cache with network_info: [{"id": "569e9535-6252-4998-9567-e57ffca9a73b", "address": "fa:16:3e:aa:36:4e", "network": {"id": "43282131-363f-42f6-b208-74cfe0d8a7c2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-166704782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fd7bc7649b647939584cc01c1f3b5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap569e9535-62", "ovs_interfaceid": "569e9535-6252-4998-9567-e57ffca9a73b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.144287] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892027, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.152794] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892028, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.328951} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.153266] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1122.154758] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ac3123-23db-40e9-b898-80212f9e855e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.182613] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 8c299247-896d-4ff1-b73a-22a71ec972fd/8c299247-896d-4ff1-b73a-22a71ec972fd.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1122.183375] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1915d641-1a16-42d3-a310-3519962f5328 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.209481] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528b5102-22cc-32f1-dd72-c7c333c6cee1, 'name': SearchDatastore_Task, 'duration_secs': 0.060926} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.210922] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.211220] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1122.211688] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.212659] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1122.213204] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1122.213762] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1122.213762] env[68285]: value = "task-2892029" [ 1122.213762] env[68285]: _type = "Task" [ 1122.213762] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.214200] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc03fa0f-5f7f-4712-8782-e7d6a35ae281 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.226772] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892029, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.231162] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1122.231445] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1122.233221] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf587c6f-1d1d-4d74-bc7d-46bb1cb20b75 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.239342] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1122.239342] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52330740-dcb3-7288-3ebc-db6ec8d29d87" [ 1122.239342] env[68285]: _type = "Task" [ 1122.239342] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.248596] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52330740-dcb3-7288-3ebc-db6ec8d29d87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.516443] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.548s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.517142] env[68285]: DEBUG nova.compute.manager [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1122.529026] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.300s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.529026] env[68285]: DEBUG nova.objects.instance [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Lazy-loading 'resources' on Instance uuid bb806297-47c6-45b7-a177-f3300fa1e29a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1122.639018] env[68285]: DEBUG oslo_concurrency.lockutils [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] Releasing lock "refresh_cache-7790f1e6-c73f-40d6-97af-00e9c518a09c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.639282] env[68285]: DEBUG nova.compute.manager [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Received event network-changed-6c7e74ef-9f39-486f-8e6c-0e8339dac843 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1122.639441] env[68285]: DEBUG nova.compute.manager [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Refreshing instance network info cache due to event network-changed-6c7e74ef-9f39-486f-8e6c-0e8339dac843. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1122.639643] env[68285]: DEBUG oslo_concurrency.lockutils [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] Acquiring lock "refresh_cache-94652533-8c34-42fa-8d70-4effc307ec71" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.639821] env[68285]: DEBUG oslo_concurrency.lockutils [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] Acquired lock "refresh_cache-94652533-8c34-42fa-8d70-4effc307ec71" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1122.639960] env[68285]: DEBUG nova.network.neutron [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Refreshing network info cache for port 6c7e74ef-9f39-486f-8e6c-0e8339dac843 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1122.644697] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892027, 'name': ReconfigVM_Task, 'duration_secs': 0.943325} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.645278] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Reconfigured VM instance instance-00000051 to attach disk [datastore1] ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf/ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1122.646818] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-48e832fd-26bc-45d4-9c26-9e8330b7a00e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.653353] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1122.653353] env[68285]: value = "task-2892030" [ 1122.653353] env[68285]: _type = "Task" [ 1122.653353] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.663769] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892030, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.727757] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892029, 'name': ReconfigVM_Task, 'duration_secs': 0.315332} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.728086] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 8c299247-896d-4ff1-b73a-22a71ec972fd/8c299247-896d-4ff1-b73a-22a71ec972fd.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1122.728752] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e3116bc-9b99-47d7-85e3-d1c3844f6c4e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.735430] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1122.735430] env[68285]: value = "task-2892031" [ 1122.735430] env[68285]: _type = "Task" [ 1122.735430] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.746908] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892031, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.752754] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52330740-dcb3-7288-3ebc-db6ec8d29d87, 'name': SearchDatastore_Task, 'duration_secs': 0.021158} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.753629] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-898c32e5-4c96-4ecb-b4e2-1a7fce965bb6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.759862] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1122.759862] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52977e72-df0c-e353-b2b4-968995bfa826" [ 1122.759862] env[68285]: _type = "Task" [ 1122.759862] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.768939] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52977e72-df0c-e353-b2b4-968995bfa826, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.946425] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.946425] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.035619] env[68285]: DEBUG nova.compute.utils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1123.037240] env[68285]: DEBUG nova.compute.manager [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1123.037383] env[68285]: DEBUG nova.network.neutron [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1123.094531] env[68285]: DEBUG nova.policy [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '184360cab7224b9eaef80dfe89d0208b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '288595d9298e43fa859bc6b68054aa08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1123.164553] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892030, 'name': Rename_Task, 'duration_secs': 0.301015} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.165448] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1123.165448] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-26d5d218-d09e-4be2-9042-5366d51d41c9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.175992] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1123.175992] env[68285]: value = "task-2892032" [ 1123.175992] env[68285]: _type = "Task" [ 1123.175992] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.187455] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892032, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.247948] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892031, 'name': Rename_Task, 'duration_secs': 0.249659} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.251794] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1123.252824] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0917c805-b729-484c-8a78-cdca75e5bd01 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.262899] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1123.262899] env[68285]: value = "task-2892033" [ 1123.262899] env[68285]: _type = "Task" [ 1123.262899] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.284882] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52977e72-df0c-e353-b2b4-968995bfa826, 'name': SearchDatastore_Task, 'duration_secs': 0.013189} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.288092] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.288949] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 7790f1e6-c73f-40d6-97af-00e9c518a09c/7790f1e6-c73f-40d6-97af-00e9c518a09c.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1123.288949] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892033, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.288949] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af60e18b-20ec-447d-bb87-719dd2f74ad2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.296444] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1123.296444] env[68285]: value = "task-2892034" [ 1123.296444] env[68285]: _type = "Task" [ 1123.296444] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.306785] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892034, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.450051] env[68285]: DEBUG nova.compute.manager [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1123.485820] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb95dbd0-0cdb-4d45-b041-b48fbbe0ecbd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.491050] env[68285]: DEBUG nova.network.neutron [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Successfully created port: fe791a09-7a4b-45f2-aaa7-a87d7393bf19 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1123.500210] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9de6cb-b1d0-4b84-820a-7f0de5b9a4ea {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.534536] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db49cca3-55fe-41e5-9812-b4344e12fd9d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.542293] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a79d6b1-e626-45bd-aa7d-9a17801d89a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.547343] env[68285]: DEBUG nova.compute.manager [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1123.561259] env[68285]: DEBUG nova.compute.provider_tree [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1123.692116] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892032, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.779720] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892033, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.808884] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892034, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.948724] env[68285]: DEBUG nova.network.neutron [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Updated VIF entry in instance network info cache for port 6c7e74ef-9f39-486f-8e6c-0e8339dac843. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1123.949204] env[68285]: DEBUG nova.network.neutron [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Updating instance_info_cache with network_info: [{"id": "6c7e74ef-9f39-486f-8e6c-0e8339dac843", "address": "fa:16:3e:76:47:74", "network": {"id": "57ce37a6-69c3-4208-b3d8-e3ac9775fedc", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-549400015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e2e033b9e69480c92c3010c4899a04a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "140f4558-c11e-4af4-ab36-234e2d2f80a4", "external-id": "nsx-vlan-transportzone-638", "segmentation_id": 638, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c7e74ef-9f", "ovs_interfaceid": "6c7e74ef-9f39-486f-8e6c-0e8339dac843", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.977947] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.063071] env[68285]: DEBUG nova.scheduler.client.report [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1124.191110] env[68285]: DEBUG oslo_vmware.api [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892032, 'name': PowerOnVM_Task, 'duration_secs': 0.987065} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.191338] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1124.191457] env[68285]: INFO nova.compute.manager [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Took 9.72 seconds to spawn the instance on the hypervisor. [ 1124.191766] env[68285]: DEBUG nova.compute.manager [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1124.192957] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc1bca7-37f0-457b-a6b9-c888cf93944c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.277804] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892033, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.306067] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892034, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.665213} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.306327] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 7790f1e6-c73f-40d6-97af-00e9c518a09c/7790f1e6-c73f-40d6-97af-00e9c518a09c.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1124.306536] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1124.306775] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1685c3d7-21a9-48b1-b67f-e54ec041af0e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.312778] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1124.312778] env[68285]: value = "task-2892035" [ 1124.312778] env[68285]: _type = "Task" [ 1124.312778] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.321418] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892035, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.451990] env[68285]: DEBUG oslo_concurrency.lockutils [req-a69b050e-a913-4dfb-a05a-4e3dd0625cf3 req-9cb80739-fabe-480e-a297-5e37944b3be2 service nova] Releasing lock "refresh_cache-94652533-8c34-42fa-8d70-4effc307ec71" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1124.561450] env[68285]: DEBUG nova.compute.manager [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1124.568696] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.042s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.573617] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.240s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.577093] env[68285]: INFO nova.compute.claims [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1124.601772] env[68285]: DEBUG nova.virt.hardware [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1124.602509] env[68285]: DEBUG nova.virt.hardware [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1124.602923] env[68285]: DEBUG nova.virt.hardware [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1124.603328] env[68285]: DEBUG nova.virt.hardware [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1124.603692] env[68285]: DEBUG nova.virt.hardware [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1124.604048] env[68285]: DEBUG nova.virt.hardware [None 
req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1124.604545] env[68285]: DEBUG nova.virt.hardware [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1124.604960] env[68285]: DEBUG nova.virt.hardware [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1124.605352] env[68285]: DEBUG nova.virt.hardware [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1124.605723] env[68285]: DEBUG nova.virt.hardware [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1124.606144] env[68285]: DEBUG nova.virt.hardware [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1124.607566] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09b7b3a-4ef7-418c-a4b9-925fb7672deb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.613429] env[68285]: INFO nova.scheduler.client.report [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Deleted allocations for instance bb806297-47c6-45b7-a177-f3300fa1e29a [ 1124.627112] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a2bad8-6fbc-48e5-bb5e-2f440017c016 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.716710] env[68285]: INFO nova.compute.manager [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Took 37.98 seconds to build instance. [ 1124.778079] env[68285]: DEBUG oslo_vmware.api [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892033, 'name': PowerOnVM_Task, 'duration_secs': 1.151001} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.778348] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1124.778560] env[68285]: DEBUG nova.compute.manager [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1124.779341] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9259e4c6-3ff6-490c-b8dc-0236dfa3a1b3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.825063] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892035, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07372} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.825667] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1124.826137] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0a8ad3-923e-4640-bb3c-d3f3f380af52 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.852914] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 7790f1e6-c73f-40d6-97af-00e9c518a09c/7790f1e6-c73f-40d6-97af-00e9c518a09c.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1124.853560] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55019cb6-9cb2-4f9d-87dc-c1c633104200 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.874031] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1124.874031] env[68285]: value = "task-2892036" [ 1124.874031] env[68285]: _type = "Task" [ 1124.874031] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.882289] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892036, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.131777] env[68285]: DEBUG oslo_concurrency.lockutils [None req-55c5aad3-be09-4941-bad9-3c32d091f3d2 tempest-ServersListShow296Test-2096148982 tempest-ServersListShow296Test-2096148982-project-member] Lock "bb806297-47c6-45b7-a177-f3300fa1e29a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.158s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.162018] env[68285]: DEBUG oslo_concurrency.lockutils [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.219120] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a8ee720-24ab-45ad-abc5-6e32aede241b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.492s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.219772] env[68285]: DEBUG oslo_concurrency.lockutils [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.060s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.219772] env[68285]: DEBUG oslo_concurrency.lockutils [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.220038] env[68285]: DEBUG oslo_concurrency.lockutils [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.220038] env[68285]: DEBUG oslo_concurrency.lockutils [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.222761] env[68285]: INFO nova.compute.manager [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Terminating instance [ 1125.298657] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.390379] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892036, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.453602] env[68285]: DEBUG nova.compute.manager [req-f0a96f00-fb2c-4c29-b557-961f3da58335 req-2e56808e-f2d9-44e2-b8e2-b0d30b5df47a service nova] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Received event network-vif-plugged-fe791a09-7a4b-45f2-aaa7-a87d7393bf19 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1125.453813] env[68285]: DEBUG oslo_concurrency.lockutils [req-f0a96f00-fb2c-4c29-b557-961f3da58335 req-2e56808e-f2d9-44e2-b8e2-b0d30b5df47a service nova] Acquiring lock "15fd3159-0fff-461d-96ce-f8cfc04eff32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.454039] env[68285]: DEBUG oslo_concurrency.lockutils [req-f0a96f00-fb2c-4c29-b557-961f3da58335 req-2e56808e-f2d9-44e2-b8e2-b0d30b5df47a service nova] Lock "15fd3159-0fff-461d-96ce-f8cfc04eff32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.454206] env[68285]: DEBUG oslo_concurrency.lockutils [req-f0a96f00-fb2c-4c29-b557-961f3da58335 req-2e56808e-f2d9-44e2-b8e2-b0d30b5df47a service nova] Lock "15fd3159-0fff-461d-96ce-f8cfc04eff32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.454934] env[68285]: DEBUG nova.compute.manager [req-f0a96f00-fb2c-4c29-b557-961f3da58335 req-2e56808e-f2d9-44e2-b8e2-b0d30b5df47a service nova] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] No waiting events found dispatching network-vif-plugged-fe791a09-7a4b-45f2-aaa7-a87d7393bf19 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1125.454934] env[68285]: WARNING nova.compute.manager [req-f0a96f00-fb2c-4c29-b557-961f3da58335 req-2e56808e-f2d9-44e2-b8e2-b0d30b5df47a service nova] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Received unexpected event network-vif-plugged-fe791a09-7a4b-45f2-aaa7-a87d7393bf19 for instance with vm_state building and task_state spawning. 
[ 1125.505827] env[68285]: DEBUG nova.network.neutron [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Successfully updated port: fe791a09-7a4b-45f2-aaa7-a87d7393bf19 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1125.734794] env[68285]: DEBUG nova.compute.manager [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1125.735014] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1125.735899] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d23a694-eda6-403c-89e0-a9c95f1d63f3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.748984] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1125.751428] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e32e80be-eced-4022-8808-79e3307cabdd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.758030] env[68285]: DEBUG oslo_vmware.api [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1125.758030] env[68285]: value = "task-2892037" [ 1125.758030] env[68285]: _type = "Task" [ 1125.758030] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.771391] env[68285]: DEBUG oslo_vmware.api [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892037, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.886424] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892036, 'name': ReconfigVM_Task, 'duration_secs': 0.722122} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.886724] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 7790f1e6-c73f-40d6-97af-00e9c518a09c/7790f1e6-c73f-40d6-97af-00e9c518a09c.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1125.887390] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f68b0e7a-2db2-4e42-83d4-a801e75f03c9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.897361] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1125.897361] env[68285]: value = "task-2892038" [ 1125.897361] env[68285]: _type = "Task" [ 1125.897361] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.907959] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892038, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.929932] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab76892-2002-4208-a057-4ab030be53c0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.938709] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff290718-3efe-4edf-867b-f705eddfaf06 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.975738] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f761a5a-d10a-4211-ae9a-1d8d57e3e778 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.982841] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e877fe18-1961-4ba1-8e0c-1f97a3468af8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.997134] env[68285]: DEBUG nova.compute.provider_tree [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1126.011917] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "refresh_cache-15fd3159-0fff-461d-96ce-f8cfc04eff32" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.012093] 
env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "refresh_cache-15fd3159-0fff-461d-96ce-f8cfc04eff32" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.012191] env[68285]: DEBUG nova.network.neutron [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1126.062841] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "8c299247-896d-4ff1-b73a-22a71ec972fd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.063079] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "8c299247-896d-4ff1-b73a-22a71ec972fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.063305] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "8c299247-896d-4ff1-b73a-22a71ec972fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.063847] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "8c299247-896d-4ff1-b73a-22a71ec972fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.063847] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "8c299247-896d-4ff1-b73a-22a71ec972fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.066148] env[68285]: INFO nova.compute.manager [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Terminating instance [ 1126.273986] env[68285]: DEBUG oslo_vmware.api [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892037, 'name': PowerOffVM_Task, 'duration_secs': 0.239487} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.274390] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1126.274612] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1126.274938] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1413327e-8444-4236-a8fa-eaecc2806789 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.341600] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1126.342358] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1126.342358] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Deleting the datastore file [datastore1] ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1126.342358] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8208cad7-9eec-4404-a4ba-978646cba8c0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.348572] env[68285]: DEBUG oslo_vmware.api [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1126.348572] env[68285]: value = "task-2892040" [ 1126.348572] env[68285]: _type = "Task" [ 1126.348572] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.356669] env[68285]: DEBUG oslo_vmware.api [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892040, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.408400] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892038, 'name': Rename_Task, 'duration_secs': 0.163938} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.408774] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1126.409042] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f749a121-f66a-469a-a882-09a06b925e2e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.415810] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1126.415810] env[68285]: value = "task-2892041" [ 1126.415810] env[68285]: _type = "Task" [ 1126.415810] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.424927] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892041, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.500192] env[68285]: DEBUG nova.scheduler.client.report [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1126.548341] env[68285]: DEBUG nova.network.neutron [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1126.571524] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "refresh_cache-8c299247-896d-4ff1-b73a-22a71ec972fd" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.571524] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquired lock "refresh_cache-8c299247-896d-4ff1-b73a-22a71ec972fd" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.571524] env[68285]: DEBUG nova.network.neutron [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1126.698075] env[68285]: DEBUG nova.network.neutron [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Updating instance_info_cache with network_info: [{"id": "fe791a09-7a4b-45f2-aaa7-a87d7393bf19", "address": "fa:16:3e:9c:55:46", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe791a09-7a", "ovs_interfaceid": "fe791a09-7a4b-45f2-aaa7-a87d7393bf19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.858659] env[68285]: DEBUG oslo_vmware.api [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892040, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.338336} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.858921] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1126.859124] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1126.859302] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1126.859488] env[68285]: INFO nova.compute.manager [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1126.859724] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1126.859941] env[68285]: DEBUG nova.compute.manager [-] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1126.860035] env[68285]: DEBUG nova.network.neutron [-] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1126.927719] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892041, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.005802] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.006441] env[68285]: DEBUG nova.compute.manager [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1127.009243] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.219s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.009463] env[68285]: DEBUG nova.objects.instance [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lazy-loading 'resources' on Instance uuid cbf2a387-8a5a-4400-833b-e04e23ca42f7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.096265] env[68285]: DEBUG nova.network.neutron [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1127.157024] env[68285]: DEBUG nova.network.neutron [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.201085] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "refresh_cache-15fd3159-0fff-461d-96ce-f8cfc04eff32" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.201085] env[68285]: DEBUG nova.compute.manager [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Instance network_info: |[{"id": "fe791a09-7a4b-45f2-aaa7-a87d7393bf19", "address": "fa:16:3e:9c:55:46", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe791a09-7a", "ovs_interfaceid": "fe791a09-7a4b-45f2-aaa7-a87d7393bf19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1127.201547] env[68285]: DEBUG nova.virt.vmwareapi.vmops 
[None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:55:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fe791a09-7a4b-45f2-aaa7-a87d7393bf19', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1127.209653] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1127.209964] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1127.210423] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d53951ee-2799-4215-bcd4-5ad007e1f7a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.234540] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1127.234540] env[68285]: value = "task-2892042" [ 1127.234540] env[68285]: _type = "Task" [ 1127.234540] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.242726] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892042, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.426930] env[68285]: DEBUG oslo_vmware.api [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892041, 'name': PowerOnVM_Task, 'duration_secs': 0.699462} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.427246] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1127.427406] env[68285]: INFO nova.compute.manager [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Took 8.42 seconds to spawn the instance on the hypervisor. 
[ 1127.427587] env[68285]: DEBUG nova.compute.manager [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1127.428464] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe29cdc5-65f7-4916-a1ad-c16255eb2be0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.500666] env[68285]: DEBUG nova.compute.manager [req-8fd8f006-6ac0-43f1-9bef-3f64f5e3b50e req-439c04d6-9f99-4f46-9e53-4dd815f5da16 service nova] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Received event network-changed-fe791a09-7a4b-45f2-aaa7-a87d7393bf19 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1127.500888] env[68285]: DEBUG nova.compute.manager [req-8fd8f006-6ac0-43f1-9bef-3f64f5e3b50e req-439c04d6-9f99-4f46-9e53-4dd815f5da16 service nova] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Refreshing instance network info cache due to event network-changed-fe791a09-7a4b-45f2-aaa7-a87d7393bf19. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1127.501101] env[68285]: DEBUG oslo_concurrency.lockutils [req-8fd8f006-6ac0-43f1-9bef-3f64f5e3b50e req-439c04d6-9f99-4f46-9e53-4dd815f5da16 service nova] Acquiring lock "refresh_cache-15fd3159-0fff-461d-96ce-f8cfc04eff32" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.501251] env[68285]: DEBUG oslo_concurrency.lockutils [req-8fd8f006-6ac0-43f1-9bef-3f64f5e3b50e req-439c04d6-9f99-4f46-9e53-4dd815f5da16 service nova] Acquired lock "refresh_cache-15fd3159-0fff-461d-96ce-f8cfc04eff32" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.501412] env[68285]: DEBUG nova.network.neutron [req-8fd8f006-6ac0-43f1-9bef-3f64f5e3b50e req-439c04d6-9f99-4f46-9e53-4dd815f5da16 service nova] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Refreshing network info cache for port fe791a09-7a4b-45f2-aaa7-a87d7393bf19 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1127.516547] env[68285]: DEBUG nova.compute.utils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1127.520931] env[68285]: DEBUG nova.compute.manager [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1127.521688] env[68285]: DEBUG nova.network.neutron [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1127.577402] env[68285]: DEBUG nova.policy [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '557a46b01bbf41e4a343d20c8206aa96', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9352aafac6e049feb8d74a91d1600224', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1127.623100] env[68285]: DEBUG nova.network.neutron [-] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.662116] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Releasing lock "refresh_cache-8c299247-896d-4ff1-b73a-22a71ec972fd" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.662656] env[68285]: DEBUG nova.compute.manager [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1127.662798] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1127.664153] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17cb38d8-044e-485f-ae20-8efd80b0854a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.675393] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1127.678942] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31be6721-2816-4460-be3f-e25175ab6d7a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.687118] env[68285]: DEBUG oslo_vmware.api [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1127.687118] env[68285]: value = "task-2892043" [ 1127.687118] env[68285]: _type = "Task" [ 1127.687118] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.702324] env[68285]: DEBUG oslo_vmware.api [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892043, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.750166] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892042, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.873557] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac99917-0aa9-48ed-9df3-bd2fa0b00f97 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.881517] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af7d536-6b06-47a7-8e65-760fde69d025 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.912444] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1489f0-d725-4578-950f-6367dd29aba7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.919994] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60078256-ef27-42cc-8916-b51bf5fb8ee6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.933788] env[68285]: DEBUG nova.compute.provider_tree [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.947882] env[68285]: INFO nova.compute.manager [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Took 36.84 seconds to build instance. [ 1128.021925] env[68285]: DEBUG nova.compute.manager [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1128.039518] env[68285]: DEBUG nova.network.neutron [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Successfully created port: 8043e9aa-9cf1-40a1-b2aa-45573789ace9 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1128.126683] env[68285]: INFO nova.compute.manager [-] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Took 1.27 seconds to deallocate network for instance. [ 1128.197234] env[68285]: DEBUG oslo_vmware.api [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892043, 'name': PowerOffVM_Task, 'duration_secs': 0.302211} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.198060] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1128.199025] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1128.199025] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-012476f4-aed2-4ed9-9a93-b1801629b7a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.226511] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1128.226511] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1128.226511] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Deleting the datastore file [datastore1] 8c299247-896d-4ff1-b73a-22a71ec972fd {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1128.226511] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e70acdc3-e4fc-477d-9ad3-d867489c2d55 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.231309] env[68285]: DEBUG oslo_vmware.api [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1128.231309] env[68285]: value = "task-2892045" [ 1128.231309] env[68285]: _type = "Task" [ 1128.231309] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.239384] env[68285]: DEBUG oslo_vmware.api [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892045, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.243759] env[68285]: DEBUG nova.network.neutron [req-8fd8f006-6ac0-43f1-9bef-3f64f5e3b50e req-439c04d6-9f99-4f46-9e53-4dd815f5da16 service nova] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Updated VIF entry in instance network info cache for port fe791a09-7a4b-45f2-aaa7-a87d7393bf19. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1128.244127] env[68285]: DEBUG nova.network.neutron [req-8fd8f006-6ac0-43f1-9bef-3f64f5e3b50e req-439c04d6-9f99-4f46-9e53-4dd815f5da16 service nova] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Updating instance_info_cache with network_info: [{"id": "fe791a09-7a4b-45f2-aaa7-a87d7393bf19", "address": "fa:16:3e:9c:55:46", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe791a09-7a", "ovs_interfaceid": "fe791a09-7a4b-45f2-aaa7-a87d7393bf19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.248881] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892042, 'name': CreateVM_Task, 'duration_secs': 0.519088} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.249679] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1128.250528] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.250743] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.251174] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1128.251494] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc0fe68f-5f20-46e2-a38b-12fc5fbcee4d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.257511] env[68285]: 
DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1128.257511] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ad50ff-49dc-c397-f8d9-1ddae0e7eafc" [ 1128.257511] env[68285]: _type = "Task" [ 1128.257511] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.267762] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ad50ff-49dc-c397-f8d9-1ddae0e7eafc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.436741] env[68285]: DEBUG nova.scheduler.client.report [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1128.450093] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2f76c2-6dab-4fc0-9885-8f2de1b06723 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.108s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.635978] env[68285]: DEBUG oslo_concurrency.lockutils [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.685167] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "f13ad5e7-341f-4475-b334-2144b0923e3b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.686244] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "f13ad5e7-341f-4475-b334-2144b0923e3b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.686559] env[68285]: DEBUG 
oslo_concurrency.lockutils [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "f13ad5e7-341f-4475-b334-2144b0923e3b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.686731] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "f13ad5e7-341f-4475-b334-2144b0923e3b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.686883] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "f13ad5e7-341f-4475-b334-2144b0923e3b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.689211] env[68285]: INFO nova.compute.manager [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Terminating instance [ 1128.741972] env[68285]: DEBUG oslo_vmware.api [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892045, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106749} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.742543] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1128.742543] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1128.742716] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1128.742883] env[68285]: INFO nova.compute.manager [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Took 1.08 seconds to destroy the instance on the hypervisor. 
[ 1128.743135] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1128.743589] env[68285]: DEBUG nova.compute.manager [-] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1128.743589] env[68285]: DEBUG nova.network.neutron [-] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1128.750729] env[68285]: DEBUG oslo_concurrency.lockutils [req-8fd8f006-6ac0-43f1-9bef-3f64f5e3b50e req-439c04d6-9f99-4f46-9e53-4dd815f5da16 service nova] Releasing lock "refresh_cache-15fd3159-0fff-461d-96ce-f8cfc04eff32" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.751079] env[68285]: DEBUG nova.compute.manager [req-8fd8f006-6ac0-43f1-9bef-3f64f5e3b50e req-439c04d6-9f99-4f46-9e53-4dd815f5da16 service nova] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Received event network-vif-deleted-66251dd3-78e6-4e1c-8c80-ad0eac62cd8e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1128.751155] env[68285]: INFO nova.compute.manager [req-8fd8f006-6ac0-43f1-9bef-3f64f5e3b50e req-439c04d6-9f99-4f46-9e53-4dd815f5da16 service nova] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Neutron deleted interface 66251dd3-78e6-4e1c-8c80-ad0eac62cd8e; detaching it from the instance and deleting it from the info cache [ 1128.751321] env[68285]: DEBUG nova.network.neutron [req-8fd8f006-6ac0-43f1-9bef-3f64f5e3b50e req-439c04d6-9f99-4f46-9e53-4dd815f5da16 service nova] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.766980] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ad50ff-49dc-c397-f8d9-1ddae0e7eafc, 'name': SearchDatastore_Task, 'duration_secs': 0.009701} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.767857] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.768128] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1128.768368] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.768510] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.768685] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1128.769185] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52d3e8e8-2c6c-47e8-a1f6-f2eb80fc1d2c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.772180] env[68285]: DEBUG nova.network.neutron [-] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1128.777141] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1128.777649] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1128.778121] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a62b8ca1-0b0f-479f-9e62-c667ac810fd5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.787598] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1128.787598] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e143ea-3810-6ab6-cc68-13354934e1dd" [ 1128.787598] env[68285]: _type = "Task" [ 1128.787598] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.792986] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e143ea-3810-6ab6-cc68-13354934e1dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.945557] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.933s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.945557] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.019s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.945557] env[68285]: DEBUG nova.objects.instance [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lazy-loading 'resources' on Instance uuid d025b807-fda4-4aff-beac-0ad6a092fe74 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1128.970317] env[68285]: INFO nova.scheduler.client.report [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Deleted allocations for instance cbf2a387-8a5a-4400-833b-e04e23ca42f7 [ 1129.032421] env[68285]: DEBUG nova.compute.manager [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1129.058978] env[68285]: DEBUG nova.virt.hardware [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1129.059240] env[68285]: DEBUG nova.virt.hardware [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1129.059396] env[68285]: DEBUG nova.virt.hardware [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1129.059574] env[68285]: DEBUG nova.virt.hardware [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1129.059719] env[68285]: DEBUG nova.virt.hardware [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1129.059863] env[68285]: DEBUG nova.virt.hardware [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1129.060123] env[68285]: DEBUG nova.virt.hardware [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1129.060390] env[68285]: DEBUG nova.virt.hardware [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1129.060565] env[68285]: DEBUG nova.virt.hardware [None 
req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1129.060725] env[68285]: DEBUG nova.virt.hardware [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1129.060912] env[68285]: DEBUG nova.virt.hardware [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1129.061778] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370d1ba6-972a-4567-b6e0-81d2f0970cc2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.071403] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6606647-91df-490d-8bb8-75a1e8e19417 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.195178] env[68285]: DEBUG nova.compute.manager [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1129.195178] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1129.196139] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9cc338b-396e-49b5-82df-8a3d054b6bbb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.204644] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1129.204986] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a88335f0-efde-4b8a-abcb-57e41294e757 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.212111] env[68285]: DEBUG oslo_vmware.api [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 1129.212111] env[68285]: value = "task-2892046" [ 1129.212111] env[68285]: _type = "Task" [ 1129.212111] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.221496] env[68285]: DEBUG oslo_vmware.api [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2892046, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.254263] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6cb2da51-0387-49be-ba00-e33ebb1773ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.264723] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ad2f9d-44a7-4a90-9ed4-75faf0c9d2c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.282027] env[68285]: DEBUG nova.network.neutron [-] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.297335] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e143ea-3810-6ab6-cc68-13354934e1dd, 'name': SearchDatastore_Task, 'duration_secs': 0.00773} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.313131] env[68285]: DEBUG nova.compute.manager [req-8fd8f006-6ac0-43f1-9bef-3f64f5e3b50e req-439c04d6-9f99-4f46-9e53-4dd815f5da16 service nova] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Detach interface failed, port_id=66251dd3-78e6-4e1c-8c80-ad0eac62cd8e, reason: Instance ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1129.313617] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b03154d0-59d2-4fb5-a02a-755a7cce965a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.319967] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1129.319967] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52852eda-68c0-7126-a81a-92e38e637622" [ 1129.319967] env[68285]: _type = "Task" [ 1129.319967] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.327956] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52852eda-68c0-7126-a81a-92e38e637622, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.480615] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9c1268c-a915-4319-9376-c53889aa6d62 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "cbf2a387-8a5a-4400-833b-e04e23ca42f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.150s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.539424] env[68285]: DEBUG nova.compute.manager [req-5db2ad30-8a93-4d8d-aea5-903a3c1980a6 req-1bd9d708-908f-4aeb-a844-c47e477e236e service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Received event network-changed-569e9535-6252-4998-9567-e57ffca9a73b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1129.539700] env[68285]: DEBUG nova.compute.manager [req-5db2ad30-8a93-4d8d-aea5-903a3c1980a6 req-1bd9d708-908f-4aeb-a844-c47e477e236e service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Refreshing instance network info cache due to event network-changed-569e9535-6252-4998-9567-e57ffca9a73b. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1129.540020] env[68285]: DEBUG oslo_concurrency.lockutils [req-5db2ad30-8a93-4d8d-aea5-903a3c1980a6 req-1bd9d708-908f-4aeb-a844-c47e477e236e service nova] Acquiring lock "refresh_cache-7790f1e6-c73f-40d6-97af-00e9c518a09c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.540176] env[68285]: DEBUG oslo_concurrency.lockutils [req-5db2ad30-8a93-4d8d-aea5-903a3c1980a6 req-1bd9d708-908f-4aeb-a844-c47e477e236e service nova] Acquired lock "refresh_cache-7790f1e6-c73f-40d6-97af-00e9c518a09c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.540375] env[68285]: DEBUG nova.network.neutron [req-5db2ad30-8a93-4d8d-aea5-903a3c1980a6 req-1bd9d708-908f-4aeb-a844-c47e477e236e service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Refreshing network info cache for port 569e9535-6252-4998-9567-e57ffca9a73b {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1129.661813] env[68285]: DEBUG nova.compute.manager [req-727af889-9e24-4a79-8833-66df281343ee req-accb9916-566e-45b8-a442-b4b915dad30a service nova] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Received event network-vif-plugged-8043e9aa-9cf1-40a1-b2aa-45573789ace9 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1129.662238] env[68285]: DEBUG oslo_concurrency.lockutils [req-727af889-9e24-4a79-8833-66df281343ee req-accb9916-566e-45b8-a442-b4b915dad30a service nova] Acquiring lock "3094ed52-33c2-40ff-ac77-6bb975a2f681-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.662525] env[68285]: DEBUG oslo_concurrency.lockutils [req-727af889-9e24-4a79-8833-66df281343ee req-accb9916-566e-45b8-a442-b4b915dad30a service nova] Lock "3094ed52-33c2-40ff-ac77-6bb975a2f681-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.662767] env[68285]: DEBUG oslo_concurrency.lockutils [req-727af889-9e24-4a79-8833-66df281343ee req-accb9916-566e-45b8-a442-b4b915dad30a service nova] Lock "3094ed52-33c2-40ff-ac77-6bb975a2f681-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.663023] env[68285]: DEBUG nova.compute.manager [req-727af889-9e24-4a79-8833-66df281343ee req-accb9916-566e-45b8-a442-b4b915dad30a service nova] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] No waiting events found dispatching network-vif-plugged-8043e9aa-9cf1-40a1-b2aa-45573789ace9 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1129.664063] env[68285]: WARNING nova.compute.manager [req-727af889-9e24-4a79-8833-66df281343ee req-accb9916-566e-45b8-a442-b4b915dad30a service nova] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Received unexpected event network-vif-plugged-8043e9aa-9cf1-40a1-b2aa-45573789ace9 for instance with vm_state building and task_state spawning. 
[ 1129.723144] env[68285]: DEBUG oslo_vmware.api [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2892046, 'name': PowerOffVM_Task, 'duration_secs': 0.221863} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.723531] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1129.723617] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1129.723826] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd66bf97-31cb-4020-afcc-d6cf4253d609 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.771293] env[68285]: DEBUG nova.network.neutron [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Successfully updated port: 8043e9aa-9cf1-40a1-b2aa-45573789ace9 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1129.790632] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1129.790632] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1129.790632] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Deleting the datastore file [datastore2] f13ad5e7-341f-4475-b334-2144b0923e3b {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1129.790632] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e0a8f61-40fa-44d1-8fd8-af5dda670103 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.791994] env[68285]: INFO nova.compute.manager [-] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Took 1.05 seconds to deallocate network for instance. 
[ 1129.800595] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bcca84-8d8c-41ea-81ea-a31e645c7795 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.804096] env[68285]: DEBUG oslo_vmware.api [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for the task: (returnval){ [ 1129.804096] env[68285]: value = "task-2892048" [ 1129.804096] env[68285]: _type = "Task" [ 1129.804096] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.811796] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3229ca96-eb0a-4306-b229-32b685b3e9b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.817993] env[68285]: DEBUG oslo_vmware.api [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2892048, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.851131] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c178e720-64b2-46a0-a853-222970516933 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.858039] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52852eda-68c0-7126-a81a-92e38e637622, 'name': SearchDatastore_Task, 'duration_secs': 0.010609} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.858359] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.858780] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 15fd3159-0fff-461d-96ce-f8cfc04eff32/15fd3159-0fff-461d-96ce-f8cfc04eff32.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1129.859115] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e61f4503-adc6-46f6-a372-3e9baa4880b8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.865106] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43b037e-0718-4b57-a89b-990647f28577 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.870968] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1129.870968] env[68285]: value = "task-2892049" [ 1129.870968] env[68285]: _type = "Task" [ 1129.870968] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.886791] env[68285]: DEBUG nova.compute.provider_tree [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1129.893989] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892049, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.275993] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "refresh_cache-3094ed52-33c2-40ff-ac77-6bb975a2f681" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.276146] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "refresh_cache-3094ed52-33c2-40ff-ac77-6bb975a2f681" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.276227] env[68285]: DEBUG nova.network.neutron [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1130.299499] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.321827] env[68285]: DEBUG oslo_vmware.api [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Task: {'id': task-2892048, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208794} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.322280] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1130.322577] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1130.323233] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1130.323518] env[68285]: INFO nova.compute.manager [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1130.324094] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1130.324409] env[68285]: DEBUG nova.compute.manager [-] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1130.324559] env[68285]: DEBUG nova.network.neutron [-] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1130.381288] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892049, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508559} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.381577] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 15fd3159-0fff-461d-96ce-f8cfc04eff32/15fd3159-0fff-461d-96ce-f8cfc04eff32.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1130.381780] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1130.382703] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06adda88-ec44-49d0-b015-47dcb8e6133d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.388407] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1130.388407] env[68285]: value = "task-2892050" [ 1130.388407] env[68285]: _type = "Task" [ 1130.388407] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.394259] env[68285]: DEBUG nova.scheduler.client.report [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1130.406473] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892050, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.432677] env[68285]: DEBUG nova.network.neutron [req-5db2ad30-8a93-4d8d-aea5-903a3c1980a6 req-1bd9d708-908f-4aeb-a844-c47e477e236e service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Updated VIF entry in instance network info cache for port 569e9535-6252-4998-9567-e57ffca9a73b. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1130.433042] env[68285]: DEBUG nova.network.neutron [req-5db2ad30-8a93-4d8d-aea5-903a3c1980a6 req-1bd9d708-908f-4aeb-a844-c47e477e236e service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Updating instance_info_cache with network_info: [{"id": "569e9535-6252-4998-9567-e57ffca9a73b", "address": "fa:16:3e:aa:36:4e", "network": {"id": "43282131-363f-42f6-b208-74cfe0d8a7c2", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-166704782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5fd7bc7649b647939584cc01c1f3b5d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap569e9535-62", "ovs_interfaceid": "569e9535-6252-4998-9567-e57ffca9a73b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.824629] env[68285]: DEBUG nova.network.neutron [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1130.898794] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892050, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.255323} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.899073] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1130.899896] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbdffdff-37fe-47f5-b9db-5daaf40b6c86 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.903025] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.958s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.904999] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.722s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.906418] env[68285]: INFO nova.compute.claims [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1130.931749] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 15fd3159-0fff-461d-96ce-f8cfc04eff32/15fd3159-0fff-461d-96ce-f8cfc04eff32.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1130.935452] env[68285]: INFO nova.scheduler.client.report [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Deleted allocations for instance d025b807-fda4-4aff-beac-0ad6a092fe74 [ 1130.943334] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e20df18d-c131-44cf-8ee2-d7206a59d130 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.959453] env[68285]: DEBUG oslo_concurrency.lockutils [req-5db2ad30-8a93-4d8d-aea5-903a3c1980a6 req-1bd9d708-908f-4aeb-a844-c47e477e236e service nova] Releasing lock "refresh_cache-7790f1e6-c73f-40d6-97af-00e9c518a09c" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.966811] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1130.966811] env[68285]: value = "task-2892051" [ 1130.966811] env[68285]: _type = "Task" [ 1130.966811] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.977331] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892051, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.012570] env[68285]: DEBUG nova.network.neutron [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Updating instance_info_cache with network_info: [{"id": "8043e9aa-9cf1-40a1-b2aa-45573789ace9", "address": "fa:16:3e:dd:07:84", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8043e9aa-9c", "ovs_interfaceid": "8043e9aa-9cf1-40a1-b2aa-45573789ace9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.283942] env[68285]: DEBUG nova.network.neutron [-] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.445904] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea2e1834-e148-4edf-85de-883d4719b6a4 tempest-MultipleCreateTestJSON-1002330172 tempest-MultipleCreateTestJSON-1002330172-project-member] Lock "d025b807-fda4-4aff-beac-0ad6a092fe74" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.037s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.477269] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892051, 'name': ReconfigVM_Task, 'duration_secs': 0.295245} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.477550] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 15fd3159-0fff-461d-96ce-f8cfc04eff32/15fd3159-0fff-461d-96ce-f8cfc04eff32.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1131.478191] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19005111-6f71-43f6-aa12-1dc63537b099 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.484684] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1131.484684] env[68285]: value = "task-2892052" [ 1131.484684] env[68285]: _type = "Task" [ 1131.484684] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.491934] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892052, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.514832] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "refresh_cache-3094ed52-33c2-40ff-ac77-6bb975a2f681" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.515132] env[68285]: DEBUG nova.compute.manager [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Instance network_info: |[{"id": "8043e9aa-9cf1-40a1-b2aa-45573789ace9", "address": "fa:16:3e:dd:07:84", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8043e9aa-9c", "ovs_interfaceid": "8043e9aa-9cf1-40a1-b2aa-45573789ace9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1131.515602] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:07:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8043e9aa-9cf1-40a1-b2aa-45573789ace9', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1131.523309] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1131.523501] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1131.523706] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a04adcc-b1bf-4895-867b-d5c36c27d9fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.544809] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1131.544809] env[68285]: value = "task-2892053" [ 1131.544809] env[68285]: _type = "Task" [ 1131.544809] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.552235] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892053, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.566818] env[68285]: DEBUG nova.compute.manager [req-159d8e46-555b-45db-a866-cd19968ec2f1 req-c389c676-ec1f-4ad0-9308-c7bdc6d96da8 service nova] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Received event network-vif-deleted-407962a9-eb0f-4437-a1b8-4513d48c09a0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1131.710940] env[68285]: DEBUG nova.compute.manager [req-f3598196-f622-43c4-be2f-924d7470dc2a req-90a860cd-126c-4eb1-aab6-12c2c0ffe3c6 service nova] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Received event network-changed-8043e9aa-9cf1-40a1-b2aa-45573789ace9 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1131.711111] env[68285]: DEBUG nova.compute.manager [req-f3598196-f622-43c4-be2f-924d7470dc2a req-90a860cd-126c-4eb1-aab6-12c2c0ffe3c6 service nova] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Refreshing instance network info cache due to event network-changed-8043e9aa-9cf1-40a1-b2aa-45573789ace9. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1131.711360] env[68285]: DEBUG oslo_concurrency.lockutils [req-f3598196-f622-43c4-be2f-924d7470dc2a req-90a860cd-126c-4eb1-aab6-12c2c0ffe3c6 service nova] Acquiring lock "refresh_cache-3094ed52-33c2-40ff-ac77-6bb975a2f681" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.711508] env[68285]: DEBUG oslo_concurrency.lockutils [req-f3598196-f622-43c4-be2f-924d7470dc2a req-90a860cd-126c-4eb1-aab6-12c2c0ffe3c6 service nova] Acquired lock "refresh_cache-3094ed52-33c2-40ff-ac77-6bb975a2f681" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.711667] env[68285]: DEBUG nova.network.neutron [req-f3598196-f622-43c4-be2f-924d7470dc2a req-90a860cd-126c-4eb1-aab6-12c2c0ffe3c6 service nova] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Refreshing network info cache for port 8043e9aa-9cf1-40a1-b2aa-45573789ace9 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1131.786897] env[68285]: INFO nova.compute.manager [-] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Took 1.46 seconds to deallocate network for instance. [ 1131.798713] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "9c190abd-23ee-4e8e-8b91-9050847581d5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.798713] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "9c190abd-23ee-4e8e-8b91-9050847581d5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.995267] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892052, 'name': Rename_Task, 'duration_secs': 0.207858} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.995567] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1131.995796] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6af2672-606a-4cbc-92ba-df5d19766713 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.001312] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1132.001312] env[68285]: value = "task-2892054" [ 1132.001312] env[68285]: _type = "Task" [ 1132.001312] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.011069] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892054, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.057386] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892053, 'name': CreateVM_Task, 'duration_secs': 0.368777} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.060857] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1132.062244] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.062543] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.062944] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1132.063317] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-296dbe9e-9c57-474e-a6fc-133ed6e36f55 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.067979] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1132.067979] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]528ab059-f7cb-63f5-679a-1629118bcba9" [ 1132.067979] env[68285]: _type = "Task" [ 1132.067979] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.076872] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528ab059-f7cb-63f5-679a-1629118bcba9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.233600] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5873d97-8408-49d7-b7fb-74a29bc34580 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.243702] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36deeb9d-082a-4b23-ab33-4bbddc6f1860 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.277501] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c2d968-6fb9-4370-b372-55022c462030 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.287609] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f070097-73f3-4157-b17b-9542f9100761 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.293460] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.305204] env[68285]: DEBUG nova.compute.utils [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1132.306732] env[68285]: DEBUG nova.compute.provider_tree [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1132.512050] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892054, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.580345] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528ab059-f7cb-63f5-679a-1629118bcba9, 'name': SearchDatastore_Task, 'duration_secs': 0.01145} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.580601] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.580832] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1132.581069] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.581212] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.581386] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1132.581662] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-253e410b-444a-4fb3-b5c5-216e951900ac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.590522] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1132.590768] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1132.591730] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebe3d533-f215-4bdc-9940-146ffaa5103b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.598640] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1132.598640] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527c9361-a2ba-ad4e-dfd6-1740200a32fb" [ 1132.598640] env[68285]: _type = "Task" [ 1132.598640] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.599537] env[68285]: DEBUG nova.network.neutron [req-f3598196-f622-43c4-be2f-924d7470dc2a req-90a860cd-126c-4eb1-aab6-12c2c0ffe3c6 service nova] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Updated VIF entry in instance network info cache for port 8043e9aa-9cf1-40a1-b2aa-45573789ace9. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1132.599827] env[68285]: DEBUG nova.network.neutron [req-f3598196-f622-43c4-be2f-924d7470dc2a req-90a860cd-126c-4eb1-aab6-12c2c0ffe3c6 service nova] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Updating instance_info_cache with network_info: [{"id": "8043e9aa-9cf1-40a1-b2aa-45573789ace9", "address": "fa:16:3e:dd:07:84", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8043e9aa-9c", "ovs_interfaceid": "8043e9aa-9cf1-40a1-b2aa-45573789ace9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.612032] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527c9361-a2ba-ad4e-dfd6-1740200a32fb, 'name': SearchDatastore_Task, 'duration_secs': 0.009079} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.613592] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-320bb55c-f110-4d77-91fd-646d42262295 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.619476] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1132.619476] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52a8b2d0-5826-6703-9772-5ca349b57a4f" [ 1132.619476] env[68285]: _type = "Task" [ 1132.619476] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.628705] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a8b2d0-5826-6703-9772-5ca349b57a4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.809646] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "9c190abd-23ee-4e8e-8b91-9050847581d5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.810715] env[68285]: DEBUG nova.scheduler.client.report [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1133.013026] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892054, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.107294] env[68285]: DEBUG oslo_concurrency.lockutils [req-f3598196-f622-43c4-be2f-924d7470dc2a req-90a860cd-126c-4eb1-aab6-12c2c0ffe3c6 service nova] Releasing lock "refresh_cache-3094ed52-33c2-40ff-ac77-6bb975a2f681" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.130753] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a8b2d0-5826-6703-9772-5ca349b57a4f, 'name': SearchDatastore_Task, 'duration_secs': 0.009731} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.130753] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.130975] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 3094ed52-33c2-40ff-ac77-6bb975a2f681/3094ed52-33c2-40ff-ac77-6bb975a2f681.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1133.131186] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4427d5d-0971-4f11-8982-3af8f8996025 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.137742] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1133.137742] env[68285]: value = "task-2892055" [ 1133.137742] env[68285]: _type = "Task" [ 1133.137742] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.147268] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892055, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.316509] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.411s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.318142] env[68285]: DEBUG nova.compute.manager [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1133.321914] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.940s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.321914] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.324391] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 22.047s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.370117] env[68285]: INFO nova.scheduler.client.report [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleted allocations for instance 34aeba05-804e-444c-8e58-69c7721b10b1 [ 1133.516428] env[68285]: DEBUG oslo_vmware.api [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892054, 'name': PowerOnVM_Task, 'duration_secs': 1.30942} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.516428] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1133.516428] env[68285]: INFO nova.compute.manager [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Took 8.96 seconds to spawn the instance on the hypervisor. 
[ 1133.516428] env[68285]: DEBUG nova.compute.manager [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1133.516428] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8021324a-093e-4de2-af30-10528c07134a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.648992] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892055, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.836020] env[68285]: DEBUG nova.compute.utils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1133.836020] env[68285]: DEBUG nova.compute.manager [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1133.836020] env[68285]: DEBUG nova.network.neutron [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1133.880192] env[68285]: DEBUG nova.policy [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fd0582abf8e4fff8e6f8316ba430988', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07b5865cc5804d8d98073e5d0c1449aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1133.882311] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25993eac-9b71-4756-a20e-58d94117f408 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "34aeba05-804e-444c-8e58-69c7721b10b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.325s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.912840] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "9c190abd-23ee-4e8e-8b91-9050847581d5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.913269] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "9c190abd-23ee-4e8e-8b91-9050847581d5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.913349] env[68285]: INFO nova.compute.manager [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Attaching volume 91d9331a-255e-4596-9535-7bf73c4b34d3 to /dev/sdb [ 1133.967198] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0080491b-9cd0-475b-8ffd-32350b44c938 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.975303] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613e926e-aada-4239-abe7-0421780f895b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.990049] env[68285]: DEBUG nova.virt.block_device [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Updating existing volume attachment record: 114e7440-16f3-475b-9a9b-a5154c4cedcf {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1134.037090] env[68285]: INFO nova.compute.manager [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Took 35.18 seconds to build instance. [ 1134.097780] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "589d1560-9269-4de2-bd79-454ebdaa40d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.098056] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "589d1560-9269-4de2-bd79-454ebdaa40d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.148197] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892055, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.214298] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b7ddad-e7e0-41fa-a1cf-d6af07f7308f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.223919] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7e275c-ff7f-436c-b1fc-20d8a35afb5e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.256818] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c377fa70-08b8-4fc0-9ea8-b9dd4d1bb51d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.265477] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89b17f4-8031-4b27-b40d-3b3aa94370bb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.285485] env[68285]: DEBUG nova.compute.provider_tree [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.339527] env[68285]: DEBUG nova.compute.manager [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1134.422098] env[68285]: DEBUG nova.network.neutron [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Successfully created port: a4826873-4993-493d-8964-49f7a6cd44f9 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1134.496766] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "8fd23cb4-45da-4bd9-a258-845eb3f6a1dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.497253] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "8fd23cb4-45da-4bd9-a258-845eb3f6a1dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.540667] env[68285]: DEBUG oslo_concurrency.lockutils [None req-17565f94-47d6-4a88-a524-59c97be5f483 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "15fd3159-0fff-461d-96ce-f8cfc04eff32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.687s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.604699] env[68285]: DEBUG nova.compute.manager [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1134.651132] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892055, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.789141] env[68285]: DEBUG nova.scheduler.client.report [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1134.990015] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "5abddda1-9bf7-4039-81c7-8622f43cc72e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.990279] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "5abddda1-9bf7-4039-81c7-8622f43cc72e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.000065] env[68285]: DEBUG nova.compute.manager [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1135.133735] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "15fd3159-0fff-461d-96ce-f8cfc04eff32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.133980] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "15fd3159-0fff-461d-96ce-f8cfc04eff32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.134258] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "15fd3159-0fff-461d-96ce-f8cfc04eff32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.134471] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "15fd3159-0fff-461d-96ce-f8cfc04eff32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.134646] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "15fd3159-0fff-461d-96ce-f8cfc04eff32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.136697] env[68285]: INFO nova.compute.manager [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Terminating instance [ 1135.142267] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.152641] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892055, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.571226} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.152876] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 3094ed52-33c2-40ff-ac77-6bb975a2f681/3094ed52-33c2-40ff-ac77-6bb975a2f681.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1135.154052] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1135.154052] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-823a6086-7339-4af9-9ea0-b43315d372e0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.161623] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1135.161623] env[68285]: value = "task-2892059" [ 1135.161623] env[68285]: _type = "Task" [ 1135.161623] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.172402] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892059, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.351191] env[68285]: DEBUG nova.compute.manager [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1135.380850] env[68285]: DEBUG nova.virt.hardware [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='e328c119ab1fd9f000096bc44756623e',container_format='bare',created_at=2025-03-10T15:56:10Z,direct_url=,disk_format='vmdk',id=16df92d7-466a-491d-b247-71c140d9d824,min_disk=1,min_ram=0,name='tempest-test-snap-1575726844',owner='07b5865cc5804d8d98073e5d0c1449aa',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-03-10T15:56:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1135.381088] env[68285]: DEBUG nova.virt.hardware [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1135.381277] env[68285]: DEBUG nova.virt.hardware [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1135.381486] env[68285]: DEBUG nova.virt.hardware [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1135.381662] env[68285]: DEBUG nova.virt.hardware [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1135.381812] env[68285]: DEBUG nova.virt.hardware [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1135.382206] env[68285]: DEBUG nova.virt.hardware [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1135.382447] env[68285]: DEBUG nova.virt.hardware [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1135.382637] env[68285]: DEBUG nova.virt.hardware [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Got 1 
possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1135.382803] env[68285]: DEBUG nova.virt.hardware [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1135.382994] env[68285]: DEBUG nova.virt.hardware [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1135.383918] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71906a73-6679-434c-ae16-d9045df2e2b4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.393402] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c2cea8-31b1-4eba-9152-439c03e1d207 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.493983] env[68285]: DEBUG nova.compute.manager [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1135.532113] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.648301] env[68285]: DEBUG nova.compute.manager [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1135.648530] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1135.649961] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b5c6b6-7a54-486a-8af3-4cbeff7b7e4d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.658527] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1135.658797] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-edffbe87-4df4-4c7e-b67a-3e035e49a78f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.666905] env[68285]: DEBUG oslo_vmware.api [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1135.666905] env[68285]: value = "task-2892060" [ 1135.666905] env[68285]: _type = "Task" [ 1135.666905] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.677029] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892059, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075931} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.677029] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1135.677816] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba790f7-7a4e-4b96-8236-99b08bfd487e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.684244] env[68285]: DEBUG oslo_vmware.api [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892060, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.707055] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 3094ed52-33c2-40ff-ac77-6bb975a2f681/3094ed52-33c2-40ff-ac77-6bb975a2f681.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1135.707341] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31b3f75d-a88a-4f55-8d07-46d6416b5d50 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.731758] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1135.731758] env[68285]: value = "task-2892061" [ 1135.731758] env[68285]: _type = "Task" [ 1135.731758] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.741685] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892061, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.798377] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.474s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.803477] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.825s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.803742] env[68285]: DEBUG nova.objects.instance [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lazy-loading 'resources' on Instance uuid 8ebbf943-2cef-4c99-a1c4-b1d213fd9884 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1136.022123] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.177973] env[68285]: DEBUG oslo_vmware.api [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 
tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892060, 'name': PowerOffVM_Task, 'duration_secs': 0.198894} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.178766] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1136.179371] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1136.182094] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23ed74e5-8a2d-4f90-8ffc-542c61be4ea4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.250785] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892061, 'name': ReconfigVM_Task, 'duration_secs': 0.339227} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.253864] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 3094ed52-33c2-40ff-ac77-6bb975a2f681/3094ed52-33c2-40ff-ac77-6bb975a2f681.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1136.253864] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1136.253864] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1136.253864] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleting the datastore file [datastore1] 15fd3159-0fff-461d-96ce-f8cfc04eff32 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1136.253864] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0a3eda8-6456-4ceb-aa66-029bcf756b75 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.256977] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-b25a60ff-d992-4389-89a2-3d50d1624498 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.268396] env[68285]: DEBUG oslo_vmware.api [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1136.268396] env[68285]: value = "task-2892064" [ 1136.268396] env[68285]: _type = "Task" [ 1136.268396] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.268396] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1136.268396] env[68285]: value = "task-2892063" [ 1136.268396] env[68285]: _type = "Task" [ 1136.268396] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.015992] env[68285]: DEBUG nova.network.neutron [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Successfully updated port: a4826873-4993-493d-8964-49f7a6cd44f9 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1137.022626] env[68285]: DEBUG nova.compute.manager [req-4190b9f2-4129-47a2-9af9-6df48e729294 req-28b4b314-239a-4cbb-8ce0-a53fca7c01f4 service nova] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Received event network-vif-plugged-a4826873-4993-493d-8964-49f7a6cd44f9 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1137.022859] env[68285]: DEBUG oslo_concurrency.lockutils [req-4190b9f2-4129-47a2-9af9-6df48e729294 req-28b4b314-239a-4cbb-8ce0-a53fca7c01f4 service nova] Acquiring lock "ce780600-5dc9-4a60-b54e-415cd1766ffb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.023057] env[68285]: DEBUG oslo_concurrency.lockutils [req-4190b9f2-4129-47a2-9af9-6df48e729294 req-28b4b314-239a-4cbb-8ce0-a53fca7c01f4 service nova] Lock "ce780600-5dc9-4a60-b54e-415cd1766ffb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.023237] env[68285]: DEBUG oslo_concurrency.lockutils [req-4190b9f2-4129-47a2-9af9-6df48e729294 req-28b4b314-239a-4cbb-8ce0-a53fca7c01f4 service nova] Lock "ce780600-5dc9-4a60-b54e-415cd1766ffb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.023401] env[68285]: DEBUG nova.compute.manager [req-4190b9f2-4129-47a2-9af9-6df48e729294 req-28b4b314-239a-4cbb-8ce0-a53fca7c01f4 service nova] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] No waiting events found dispatching network-vif-plugged-a4826873-4993-493d-8964-49f7a6cd44f9 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1137.023559] env[68285]: WARNING nova.compute.manager [req-4190b9f2-4129-47a2-9af9-6df48e729294 req-28b4b314-239a-4cbb-8ce0-a53fca7c01f4 service nova] 
[instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Received unexpected event network-vif-plugged-a4826873-4993-493d-8964-49f7a6cd44f9 for instance with vm_state building and task_state spawning. [ 1137.036413] env[68285]: DEBUG oslo_vmware.api [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892064, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.487459} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.036656] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892063, 'name': Rename_Task, 'duration_secs': 0.159161} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.037411] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1137.037652] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1137.038480] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1137.038480] env[68285]: INFO nova.compute.manager [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Took 1.39 seconds to destroy the instance on the hypervisor. [ 1137.038480] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1137.038480] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1137.039180] env[68285]: DEBUG nova.compute.manager [-] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1137.039180] env[68285]: DEBUG nova.network.neutron [-] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1137.040704] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5289638c-501a-4d47-97d2-b276c3f53f2d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.048246] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1137.048246] env[68285]: value = "task-2892066" [ 1137.048246] env[68285]: _type = "Task" [ 1137.048246] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.060689] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892066, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.098910] env[68285]: INFO nova.scheduler.client.report [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleted allocation for migration cfcece04-fb70-4f24-94cf-cf4bae5a06fb [ 1137.407020] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fe43bb-2c58-46ef-a17a-1ae1f85ea016 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.421717] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7625b4b2-c7c9-4e10-a78f-60b475284dee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.455677] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963b06d9-8af5-42a5-b33e-6ab4bee26b57 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.465127] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984ceb58-de62-4c8d-b9f1-4092a4050822 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.481121] env[68285]: DEBUG nova.compute.provider_tree [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.526066] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "refresh_cache-ce780600-5dc9-4a60-b54e-415cd1766ffb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.526308] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "refresh_cache-ce780600-5dc9-4a60-b54e-415cd1766ffb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.527095] env[68285]: DEBUG nova.network.neutron [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1137.557568] env[68285]: DEBUG oslo_vmware.api [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892066, 'name': PowerOnVM_Task, 'duration_secs': 0.48465} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.557836] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1137.558268] env[68285]: INFO nova.compute.manager [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Took 8.53 seconds to spawn the instance on the hypervisor. [ 1137.558268] env[68285]: DEBUG nova.compute.manager [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1137.559050] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478b9b08-6f5c-4fb7-8782-7cb6e661cfad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.607625] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32110685-856e-4465-9bf4-9b282a6851e9 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "1f5fe064-0443-4b7f-911a-45d803836eeb" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 29.732s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.918544] env[68285]: DEBUG nova.network.neutron [-] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.984225] env[68285]: DEBUG nova.scheduler.client.report [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1138.059429] env[68285]: DEBUG nova.network.neutron [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1138.075969] env[68285]: INFO nova.compute.manager [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Took 36.76 seconds to build instance. 
[ 1138.188325] env[68285]: DEBUG nova.network.neutron [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Updating instance_info_cache with network_info: [{"id": "a4826873-4993-493d-8964-49f7a6cd44f9", "address": "fa:16:3e:10:b9:6b", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4826873-49", "ovs_interfaceid": "a4826873-4993-493d-8964-49f7a6cd44f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.381136] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Acquiring lock "75b9c202-b50d-4c59-b3ef-03e61225a1dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.381406] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Lock "75b9c202-b50d-4c59-b3ef-03e61225a1dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.422532] env[68285]: INFO nova.compute.manager [-] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Took 1.38 seconds to deallocate network for instance. 
[ 1138.490714] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.687s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.493253] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.423s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.497539] env[68285]: DEBUG nova.objects.instance [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lazy-loading 'resources' on Instance uuid 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1138.521910] env[68285]: INFO nova.scheduler.client.report [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Deleted allocations for instance 8ebbf943-2cef-4c99-a1c4-b1d213fd9884 [ 1138.544929] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Volume attach. 
Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1138.545186] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581006', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'name': 'volume-91d9331a-255e-4596-9535-7bf73c4b34d3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9c190abd-23ee-4e8e-8b91-9050847581d5', 'attached_at': '', 'detached_at': '', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'serial': '91d9331a-255e-4596-9535-7bf73c4b34d3'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1138.546059] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0a9d2d-106d-4a06-bbd6-387720b55b6c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.563325] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc57a6f-4d96-469a-8fd2-a546324b43d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.568306] env[68285]: DEBUG nova.compute.manager [req-9703670d-eed2-42d1-9aaf-802bb8246677 req-536968fe-1410-4305-84f8-af4f02f205ab service nova] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Received event network-changed-a4826873-4993-493d-8964-49f7a6cd44f9 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1138.568539] env[68285]: DEBUG nova.compute.manager [req-9703670d-eed2-42d1-9aaf-802bb8246677 req-536968fe-1410-4305-84f8-af4f02f205ab service nova] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Refreshing instance network info cache due to event network-changed-a4826873-4993-493d-8964-49f7a6cd44f9. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1138.568672] env[68285]: DEBUG oslo_concurrency.lockutils [req-9703670d-eed2-42d1-9aaf-802bb8246677 req-536968fe-1410-4305-84f8-af4f02f205ab service nova] Acquiring lock "refresh_cache-ce780600-5dc9-4a60-b54e-415cd1766ffb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.585381] env[68285]: DEBUG oslo_concurrency.lockutils [None req-52cb0d77-0cbd-4882-9bf8-ce229ab22776 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "3094ed52-33c2-40ff-ac77-6bb975a2f681" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.283s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.593312] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] volume-91d9331a-255e-4596-9535-7bf73c4b34d3/volume-91d9331a-255e-4596-9535-7bf73c4b34d3.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1138.594657] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71dcf6be-2f4f-4071-8991-8c6ab7b59baf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.616750] env[68285]: DEBUG oslo_vmware.api [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1138.616750] env[68285]: value = "task-2892067" [ 1138.616750] env[68285]: _type = "Task" [ 1138.616750] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.628026] env[68285]: DEBUG oslo_vmware.api [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892067, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.693255] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "refresh_cache-ce780600-5dc9-4a60-b54e-415cd1766ffb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.693255] env[68285]: DEBUG nova.compute.manager [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Instance network_info: |[{"id": "a4826873-4993-493d-8964-49f7a6cd44f9", "address": "fa:16:3e:10:b9:6b", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4826873-49", "ovs_interfaceid": "a4826873-4993-493d-8964-49f7a6cd44f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1138.693255] env[68285]: DEBUG oslo_concurrency.lockutils [req-9703670d-eed2-42d1-9aaf-802bb8246677 req-536968fe-1410-4305-84f8-af4f02f205ab service nova] Acquired lock "refresh_cache-ce780600-5dc9-4a60-b54e-415cd1766ffb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1138.693255] env[68285]: DEBUG nova.network.neutron [req-9703670d-eed2-42d1-9aaf-802bb8246677 req-536968fe-1410-4305-84f8-af4f02f205ab service nova] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Refreshing network info cache for port a4826873-4993-493d-8964-49f7a6cd44f9 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1138.694107] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:b9:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '209639b9-c313-4b35-86dc-dccd744d174a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4826873-4993-493d-8964-49f7a6cd44f9', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1138.702812] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1138.703092] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1138.703316] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ffdb2a6-5e13-4667-89cd-044f17b192c0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.726652] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1138.726652] env[68285]: value = "task-2892068" [ 1138.726652] env[68285]: _type = "Task" [ 1138.726652] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.735094] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892068, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.884183] env[68285]: DEBUG nova.compute.manager [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1138.930613] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.034016] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a2780b5-43ed-4612-9ab4-aeccdb0f953d tempest-ImagesOneServerNegativeTestJSON-1263119992 tempest-ImagesOneServerNegativeTestJSON-1263119992-project-member] Lock "8ebbf943-2cef-4c99-a1c4-b1d213fd9884" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.673s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.112693] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c98bb814-00b3-41c4-a470-df4be04718d1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "3094ed52-33c2-40ff-ac77-6bb975a2f681" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.112951] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c98bb814-00b3-41c4-a470-df4be04718d1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "3094ed52-33c2-40ff-ac77-6bb975a2f681" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.113173] env[68285]: DEBUG nova.compute.manager [None req-c98bb814-00b3-41c4-a470-df4be04718d1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 
3094ed52-33c2-40ff-ac77-6bb975a2f681] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1139.114427] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc53921-1785-489d-a11d-0b4f3884dd24 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.126437] env[68285]: DEBUG nova.compute.manager [None req-c98bb814-00b3-41c4-a470-df4be04718d1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68285) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1139.127488] env[68285]: DEBUG nova.objects.instance [None req-c98bb814-00b3-41c4-a470-df4be04718d1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lazy-loading 'flavor' on Instance uuid 3094ed52-33c2-40ff-ac77-6bb975a2f681 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1139.133870] env[68285]: DEBUG oslo_vmware.api [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892067, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.238080] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892068, 'name': CreateVM_Task, 'duration_secs': 0.366457} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.240176] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1139.241132] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/16df92d7-466a-491d-b247-71c140d9d824" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.241199] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/16df92d7-466a-491d-b247-71c140d9d824" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.241588] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/16df92d7-466a-491d-b247-71c140d9d824" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1139.241850] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b99f756-5d17-449e-8e82-4595162d6bbb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.248143] env[68285]: DEBUG 
oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1139.248143] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5204e631-e947-36c6-ba09-f3a26aa0eaee" [ 1139.248143] env[68285]: _type = "Task" [ 1139.248143] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.255701] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5204e631-e947-36c6-ba09-f3a26aa0eaee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.303609] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6e8687-4bdc-47c8-b6f3-ef874fbf5cfa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.311311] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eded62e-6eea-4da7-9f88-67dc3b6f4d00 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.344665] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a062415b-c89e-4c35-8d00-63dc7060a25e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.352862] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5170f549-bfe2-46fa-b870-3288195f0540 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.366475] env[68285]: DEBUG nova.compute.provider_tree [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1139.406925] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.430152] env[68285]: DEBUG oslo_concurrency.lockutils [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "1f5fe064-0443-4b7f-911a-45d803836eeb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.430466] env[68285]: DEBUG oslo_concurrency.lockutils [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "1f5fe064-0443-4b7f-911a-45d803836eeb" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.430643] env[68285]: DEBUG oslo_concurrency.lockutils [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "1f5fe064-0443-4b7f-911a-45d803836eeb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.430840] env[68285]: DEBUG oslo_concurrency.lockutils [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "1f5fe064-0443-4b7f-911a-45d803836eeb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.431070] env[68285]: DEBUG oslo_concurrency.lockutils [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "1f5fe064-0443-4b7f-911a-45d803836eeb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.433119] env[68285]: INFO nova.compute.manager [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Terminating instance [ 1139.541050] env[68285]: DEBUG nova.network.neutron [req-9703670d-eed2-42d1-9aaf-802bb8246677 req-536968fe-1410-4305-84f8-af4f02f205ab service nova] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Updated VIF entry in instance network info cache for port a4826873-4993-493d-8964-49f7a6cd44f9. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1139.541463] env[68285]: DEBUG nova.network.neutron [req-9703670d-eed2-42d1-9aaf-802bb8246677 req-536968fe-1410-4305-84f8-af4f02f205ab service nova] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Updating instance_info_cache with network_info: [{"id": "a4826873-4993-493d-8964-49f7a6cd44f9", "address": "fa:16:3e:10:b9:6b", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4826873-49", "ovs_interfaceid": "a4826873-4993-493d-8964-49f7a6cd44f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.629155] env[68285]: DEBUG oslo_vmware.api [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892067, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.758062] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/16df92d7-466a-491d-b247-71c140d9d824" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1139.758322] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Processing image 16df92d7-466a-491d-b247-71c140d9d824 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1139.758566] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/16df92d7-466a-491d-b247-71c140d9d824/16df92d7-466a-491d-b247-71c140d9d824.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.758865] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/16df92d7-466a-491d-b247-71c140d9d824/16df92d7-466a-491d-b247-71c140d9d824.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.758865] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1139.759137] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9aee5587-0de4-4db5-b3f4-adab15348309 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.767037] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1139.767608] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1139.767867] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3501c56d-59d7-42ce-bd14-d6fea024e86b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.772712] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1139.772712] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5230a485-1084-5fcd-f2ac-c9e9b720adc5" [ 1139.772712] env[68285]: _type = "Task" [ 1139.772712] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.780279] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5230a485-1084-5fcd-f2ac-c9e9b720adc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.869522] env[68285]: DEBUG nova.scheduler.client.report [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1139.937179] env[68285]: DEBUG nova.compute.manager [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1139.937506] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1139.938425] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c5e0df-b139-4fc2-bec5-e3f88dc80192 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.948081] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1139.948297] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43947a39-023e-4af1-92e6-b91d7e5b40dc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.954998] env[68285]: DEBUG oslo_vmware.api [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1139.954998] env[68285]: value = "task-2892069" [ 1139.954998] env[68285]: _type = "Task" [ 1139.954998] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.962952] env[68285]: DEBUG oslo_vmware.api [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892069, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.044279] env[68285]: DEBUG oslo_concurrency.lockutils [req-9703670d-eed2-42d1-9aaf-802bb8246677 req-536968fe-1410-4305-84f8-af4f02f205ab service nova] Releasing lock "refresh_cache-ce780600-5dc9-4a60-b54e-415cd1766ffb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.044774] env[68285]: DEBUG nova.compute.manager [req-9703670d-eed2-42d1-9aaf-802bb8246677 req-536968fe-1410-4305-84f8-af4f02f205ab service nova] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Received event network-vif-deleted-fe791a09-7a4b-45f2-aaa7-a87d7393bf19 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1140.134401] env[68285]: DEBUG oslo_vmware.api [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892067, 'name': ReconfigVM_Task, 'duration_secs': 1.09266} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.134401] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Reconfigured VM instance instance-00000049 to attach disk [datastore1] volume-91d9331a-255e-4596-9535-7bf73c4b34d3/volume-91d9331a-255e-4596-9535-7bf73c4b34d3.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1140.138616] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-232c9c70-c0ae-408b-aca6-3b74cfd647db {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.153570] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98bb814-00b3-41c4-a470-df4be04718d1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1140.154125] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec71e437-576e-4090-a753-6d3aee4738cb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.163426] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "fe8e0a71-e9b0-4035-a696-51455d6fc473" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.163672] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "fe8e0a71-e9b0-4035-a696-51455d6fc473" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.164832] env[68285]: DEBUG oslo_vmware.api [None req-c98bb814-00b3-41c4-a470-df4be04718d1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1140.164832] env[68285]: value = "task-2892070" [ 1140.164832] env[68285]: _type = "Task" [ 1140.164832] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.165295] env[68285]: DEBUG oslo_vmware.api [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1140.165295] env[68285]: value = "task-2892071" [ 1140.165295] env[68285]: _type = "Task" [ 1140.165295] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.178840] env[68285]: DEBUG oslo_vmware.api [None req-c98bb814-00b3-41c4-a470-df4be04718d1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892070, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.183299] env[68285]: DEBUG oslo_vmware.api [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892071, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.284655] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Preparing fetch location {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1140.284958] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Fetch image to [datastore1] OSTACK_IMG_58f69169-30de-4bce-b3a1-2eabce5d16aa/OSTACK_IMG_58f69169-30de-4bce-b3a1-2eabce5d16aa.vmdk {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1140.285190] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Downloading stream optimized image 16df92d7-466a-491d-b247-71c140d9d824 to [datastore1] OSTACK_IMG_58f69169-30de-4bce-b3a1-2eabce5d16aa/OSTACK_IMG_58f69169-30de-4bce-b3a1-2eabce5d16aa.vmdk on the data store datastore1 as vApp {{(pid=68285) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1140.285400] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Downloading image file data 16df92d7-466a-491d-b247-71c140d9d824 to the ESX as VM named 'OSTACK_IMG_58f69169-30de-4bce-b3a1-2eabce5d16aa' {{(pid=68285) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1140.368901] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1140.368901] env[68285]: value = "resgroup-9" [ 1140.368901] env[68285]: _type = "ResourcePool" [ 1140.368901] env[68285]: }. 
{{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1140.368901] env[68285]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-20f0287c-c8fe-4f28-9969-f61e032c98fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.384433] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.891s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.386526] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.409s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.387978] env[68285]: INFO nova.compute.claims [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1140.396186] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lease: (returnval){ [ 1140.396186] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525fa0ce-589e-800d-bc14-154f8c4a8e69" [ 1140.396186] env[68285]: _type = "HttpNfcLease" [ 1140.396186] env[68285]: } obtained for vApp import into resource pool (val){ [ 1140.396186] env[68285]: value = "resgroup-9" [ 1140.396186] env[68285]: _type = "ResourcePool" [ 1140.396186] env[68285]: }. {{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1140.396451] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the lease: (returnval){ [ 1140.396451] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525fa0ce-589e-800d-bc14-154f8c4a8e69" [ 1140.396451] env[68285]: _type = "HttpNfcLease" [ 1140.396451] env[68285]: } to be ready. {{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1140.403588] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1140.403588] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525fa0ce-589e-800d-bc14-154f8c4a8e69" [ 1140.403588] env[68285]: _type = "HttpNfcLease" [ 1140.403588] env[68285]: } is initializing. 
{{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1140.407058] env[68285]: INFO nova.scheduler.client.report [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Deleted allocations for instance 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11 [ 1140.466841] env[68285]: DEBUG oslo_vmware.api [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892069, 'name': PowerOffVM_Task, 'duration_secs': 0.237678} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.467770] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1140.467951] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1140.468236] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a25e7ab1-4d49-49ac-8eeb-677ed4a8ee64 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.532332] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1140.532726] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1140.532978] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleting the datastore file [datastore1] 1f5fe064-0443-4b7f-911a-45d803836eeb {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1140.533269] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62046df4-367b-48bc-8f82-37917a59352c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.539864] env[68285]: DEBUG oslo_vmware.api [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1140.539864] env[68285]: value = "task-2892074" [ 1140.539864] env[68285]: _type = "Task" [ 1140.539864] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.547381] env[68285]: DEBUG oslo_vmware.api [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892074, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.666841] env[68285]: DEBUG nova.compute.manager [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1140.681715] env[68285]: DEBUG oslo_vmware.api [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892071, 'name': ReconfigVM_Task, 'duration_secs': 0.164599} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.685312] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581006', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'name': 'volume-91d9331a-255e-4596-9535-7bf73c4b34d3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9c190abd-23ee-4e8e-8b91-9050847581d5', 'attached_at': '', 'detached_at': '', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'serial': '91d9331a-255e-4596-9535-7bf73c4b34d3'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1140.687260] env[68285]: DEBUG oslo_vmware.api [None req-c98bb814-00b3-41c4-a470-df4be04718d1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892070, 'name': PowerOffVM_Task, 'duration_secs': 0.197328} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.687706] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98bb814-00b3-41c4-a470-df4be04718d1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1140.687902] env[68285]: DEBUG nova.compute.manager [None req-c98bb814-00b3-41c4-a470-df4be04718d1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1140.688702] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab550b7-a26f-4b1d-a887-7b4396c5c567 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.904802] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1140.904802] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525fa0ce-589e-800d-bc14-154f8c4a8e69" [ 1140.904802] env[68285]: _type = "HttpNfcLease" [ 1140.904802] env[68285]: } is initializing. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1140.917431] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d4b1d8be-19c1-4033-8172-1db44071eaf7 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "1a7d1cfc-67a5-4178-9bc2-eb8af5104d11" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.603s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.050103] env[68285]: DEBUG oslo_vmware.api [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892074, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168391} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.050372] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1141.050562] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1141.050881] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1141.050922] env[68285]: INFO nova.compute.manager [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1141.051186] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1141.051387] env[68285]: DEBUG nova.compute.manager [-] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1141.051479] env[68285]: DEBUG nova.network.neutron [-] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1141.203067] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c98bb814-00b3-41c4-a470-df4be04718d1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "3094ed52-33c2-40ff-ac77-6bb975a2f681" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.089s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.210640] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.407487] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1141.407487] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525fa0ce-589e-800d-bc14-154f8c4a8e69" [ 1141.407487] env[68285]: _type = "HttpNfcLease" [ 1141.407487] env[68285]: } is ready. 
{{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1141.408951] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1141.408951] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525fa0ce-589e-800d-bc14-154f8c4a8e69" [ 1141.408951] env[68285]: _type = "HttpNfcLease" [ 1141.408951] env[68285]: }. {{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1141.409506] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d77d44d-dcd8-4562-9f95-9142e5caab2f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.422136] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523767c4-1e7f-0cbd-43b4-15190347931d/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1141.422136] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523767c4-1e7f-0cbd-43b4-15190347931d/disk-0.vmdk. {{(pid=68285) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1141.490493] env[68285]: DEBUG nova.compute.manager [req-de52aa95-4737-4983-8b59-ad35a14c9ecd req-1b9e185e-7f48-4210-a84c-582195b817d4 service nova] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Received event network-vif-deleted-025d1a2b-ae65-4a5c-a90f-66fabc72e11c {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1141.490493] env[68285]: INFO nova.compute.manager [req-de52aa95-4737-4983-8b59-ad35a14c9ecd req-1b9e185e-7f48-4210-a84c-582195b817d4 service nova] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Neutron deleted interface 025d1a2b-ae65-4a5c-a90f-66fabc72e11c; detaching it from the instance and deleting it from the info cache [ 1141.491038] env[68285]: DEBUG nova.network.neutron [req-de52aa95-4737-4983-8b59-ad35a14c9ecd req-1b9e185e-7f48-4210-a84c-582195b817d4 service nova] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.503793] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1d6957fa-b282-4e41-b26d-50c338537b3c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.603520] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "3094ed52-33c2-40ff-ac77-6bb975a2f681" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.603786] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "3094ed52-33c2-40ff-ac77-6bb975a2f681" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.603989] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "3094ed52-33c2-40ff-ac77-6bb975a2f681-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.604199] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "3094ed52-33c2-40ff-ac77-6bb975a2f681-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.604367] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "3094ed52-33c2-40ff-ac77-6bb975a2f681-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.608557] env[68285]: INFO nova.compute.manager [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Terminating instance [ 1141.734131] env[68285]: DEBUG nova.objects.instance [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lazy-loading 'flavor' on Instance uuid 9c190abd-23ee-4e8e-8b91-9050847581d5 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1141.810034] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8036b6-a49f-4822-a4d8-9490a89d5139 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.821659] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf194c6-bd6a-4e2f-b33d-728c2be7220f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.864191] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e098ce82-e71c-490b-9bcd-92aa5adf97c0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.871596] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2f61f0-5d09-4343-b206-2c238c1addb2 
{{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.890456] env[68285]: DEBUG nova.compute.provider_tree [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1141.989952] env[68285]: DEBUG nova.network.neutron [-] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.997203] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c1fabbb-3e3d-4399-9686-0b0a556e7bab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.009553] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737d0fb8-4b72-421e-b9ef-8881cc1912e0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.055517] env[68285]: DEBUG nova.compute.manager [req-de52aa95-4737-4983-8b59-ad35a14c9ecd req-1b9e185e-7f48-4210-a84c-582195b817d4 service nova] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Detach interface failed, port_id=025d1a2b-ae65-4a5c-a90f-66fabc72e11c, reason: Instance 1f5fe064-0443-4b7f-911a-45d803836eeb could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1142.113110] env[68285]: DEBUG nova.compute.manager [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1142.113388] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1142.114275] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-368bd0b5-e168-4ebe-bda1-ae6afd05c788 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.122181] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1142.122181] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-209417f1-0aa6-4c6d-9202-4302f0a36313 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.194256] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1142.194577] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1142.194780] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleting the datastore file [datastore1] 3094ed52-33c2-40ff-ac77-6bb975a2f681 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1142.195291] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf64ce2e-b4f9-4043-9616-5f2eed03a52b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.206958] env[68285]: DEBUG oslo_vmware.api [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1142.206958] env[68285]: value = "task-2892076" [ 1142.206958] env[68285]: _type = "Task" [ 1142.206958] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.217348] env[68285]: DEBUG oslo_vmware.api [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892076, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.244094] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dd0c7594-8365-44c6-9783-b4c90dcda9f1 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "9c190abd-23ee-4e8e-8b91-9050847581d5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.330s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.275930] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Completed reading data from the image iterator. {{(pid=68285) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1142.276320] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523767c4-1e7f-0cbd-43b4-15190347931d/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1142.277730] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77da69d3-1b2f-4b95-8a40-3758224a13ca {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.287796] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523767c4-1e7f-0cbd-43b4-15190347931d/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1142.288036] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523767c4-1e7f-0cbd-43b4-15190347931d/disk-0.vmdk. 
{{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1142.288398] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-d4ac3c3a-a63b-49ed-8655-8fc9d289d562 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.397304] env[68285]: DEBUG nova.scheduler.client.report [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1142.432147] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.432432] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.469869] env[68285]: DEBUG oslo_vmware.rw_handles [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523767c4-1e7f-0cbd-43b4-15190347931d/disk-0.vmdk. {{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1142.470596] env[68285]: INFO nova.virt.vmwareapi.images [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Downloaded image file data 16df92d7-466a-491d-b247-71c140d9d824 [ 1142.471058] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa35ad25-73d5-42dc-a53e-e330d28701c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.488367] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff1317d2-9715-40ba-a528-8094cce775c1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.493438] env[68285]: INFO nova.compute.manager [-] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Took 1.44 seconds to deallocate network for instance. 
[ 1142.511690] env[68285]: INFO nova.virt.vmwareapi.images [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] The imported VM was unregistered [ 1142.514265] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Caching image {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1142.514543] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Creating directory with path [datastore1] devstack-image-cache_base/16df92d7-466a-491d-b247-71c140d9d824 {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1142.514823] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c938bf3-0561-4780-8e89-f16567ef5a17 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.536607] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Created directory with path [datastore1] devstack-image-cache_base/16df92d7-466a-491d-b247-71c140d9d824 {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1142.536880] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_58f69169-30de-4bce-b3a1-2eabce5d16aa/OSTACK_IMG_58f69169-30de-4bce-b3a1-2eabce5d16aa.vmdk to [datastore1] devstack-image-cache_base/16df92d7-466a-491d-b247-71c140d9d824/16df92d7-466a-491d-b247-71c140d9d824.vmdk. {{(pid=68285) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1142.537246] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-7eb2d426-702f-4a38-8671-740fea297ddd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.547253] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1142.547253] env[68285]: value = "task-2892078" [ 1142.547253] env[68285]: _type = "Task" [ 1142.547253] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.557664] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892078, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.716381] env[68285]: DEBUG oslo_vmware.api [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892076, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146793} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.716669] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1142.716856] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1142.717035] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1142.717208] env[68285]: INFO nova.compute.manager [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1142.717464] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1142.717634] env[68285]: DEBUG nova.compute.manager [-] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1142.717726] env[68285]: DEBUG nova.network.neutron [-] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1142.744443] env[68285]: INFO nova.compute.manager [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Rebuilding instance [ 1142.791764] env[68285]: DEBUG nova.compute.manager [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1142.793852] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b91b4ca-4e1f-40c1-bbc2-4f63ae853c95 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.902244] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.902959] env[68285]: DEBUG nova.compute.manager [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1142.905844] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.608s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.906064] env[68285]: DEBUG nova.objects.instance [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1142.935635] env[68285]: INFO nova.compute.manager [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Detaching volume 729629cd-cafe-4baf-9474-cba7083d3a6a [ 1142.982909] env[68285]: INFO nova.virt.block_device [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Attempting to driver detach volume 729629cd-cafe-4baf-9474-cba7083d3a6a from mountpoint /dev/sdb [ 1142.983250] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Volume detach. 
Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1142.983973] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580937', 'volume_id': '729629cd-cafe-4baf-9474-cba7083d3a6a', 'name': 'volume-729629cd-cafe-4baf-9474-cba7083d3a6a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4f20336-9c29-4aac-8c0d-f577749cd7d7', 'attached_at': '', 'detached_at': '', 'volume_id': '729629cd-cafe-4baf-9474-cba7083d3a6a', 'serial': '729629cd-cafe-4baf-9474-cba7083d3a6a'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1142.984456] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f079bef-9eea-4f6d-aae0-9e6324f28a44 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.009529] env[68285]: DEBUG oslo_concurrency.lockutils [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.011231] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac8b0da-7c03-48dd-92ed-982bd8e9bf83 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.020040] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851394e6-45f2-4c31-91c2-a0e5d154cc47 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.046450] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a9bc45-1398-452f-add6-cf679f12950b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.065066] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] The volume has not been displaced from its original location: [datastore2] volume-729629cd-cafe-4baf-9474-cba7083d3a6a/volume-729629cd-cafe-4baf-9474-cba7083d3a6a.vmdk. No consolidation needed. 
{{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1143.072763] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Reconfiguring VM instance instance-00000030 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1143.074162] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-828017d5-3af1-454f-aee9-113ca7e2bc13 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.086987] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892078, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.093205] env[68285]: DEBUG oslo_vmware.api [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1143.093205] env[68285]: value = "task-2892079" [ 1143.093205] env[68285]: _type = "Task" [ 1143.093205] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.104438] env[68285]: DEBUG oslo_vmware.api [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892079, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.410557] env[68285]: DEBUG nova.compute.utils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1143.414920] env[68285]: DEBUG nova.compute.manager [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1143.415081] env[68285]: DEBUG nova.network.neutron [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1143.491430] env[68285]: DEBUG nova.network.neutron [-] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.562243] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892078, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.603868] env[68285]: DEBUG oslo_vmware.api [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892079, 'name': ReconfigVM_Task, 'duration_secs': 0.266148} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.605591] env[68285]: DEBUG nova.policy [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41e116b3ac9d4c7386847a5559ea313c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43491d0bdffc49eaaad084f3124cffcb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1143.607639] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Reconfigured VM instance instance-00000030 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1143.613512] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73146e2f-4e7f-44f4-b280-765ff80ffb2c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.631511] env[68285]: DEBUG oslo_vmware.api [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1143.631511] env[68285]: value = "task-2892080" [ 1143.631511] env[68285]: _type = "Task" [ 1143.631511] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.639767] env[68285]: DEBUG nova.compute.manager [req-a85a2219-1797-4c26-bf3f-18b9b0f51bcd req-ff6bb780-67a3-431f-913c-93cd94c5b24a service nova] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Received event network-vif-deleted-8043e9aa-9cf1-40a1-b2aa-45573789ace9 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1143.644853] env[68285]: DEBUG oslo_vmware.api [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892080, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.817027] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1143.817027] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed35c077-3744-401d-9562-7a4f3f982e1e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.830033] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1143.830033] env[68285]: value = "task-2892081" [ 1143.830033] env[68285]: _type = "Task" [ 1143.830033] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.838178] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892081, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.918965] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b2c008b6-0415-4019-84c4-7fa3ebbd770c tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.920786] env[68285]: DEBUG nova.compute.manager [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1143.923928] env[68285]: DEBUG oslo_concurrency.lockutils [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.288s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.924388] env[68285]: DEBUG nova.objects.instance [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lazy-loading 'resources' on Instance uuid ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.996273] env[68285]: INFO nova.compute.manager [-] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Took 1.28 seconds to deallocate network for instance. 
[ 1144.062407] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892078, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.144644] env[68285]: DEBUG oslo_vmware.api [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892080, 'name': ReconfigVM_Task, 'duration_secs': 0.145899} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.144644] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580937', 'volume_id': '729629cd-cafe-4baf-9474-cba7083d3a6a', 'name': 'volume-729629cd-cafe-4baf-9474-cba7083d3a6a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4f20336-9c29-4aac-8c0d-f577749cd7d7', 'attached_at': '', 'detached_at': '', 'volume_id': '729629cd-cafe-4baf-9474-cba7083d3a6a', 'serial': '729629cd-cafe-4baf-9474-cba7083d3a6a'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1144.343810] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892081, 'name': PowerOffVM_Task, 'duration_secs': 0.233516} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.345516] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1144.405636] env[68285]: INFO nova.compute.manager [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Detaching volume 91d9331a-255e-4596-9535-7bf73c4b34d3 [ 1144.447257] env[68285]: INFO nova.virt.block_device [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Attempting to driver detach volume 91d9331a-255e-4596-9535-7bf73c4b34d3 from mountpoint /dev/sdb [ 1144.447679] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Volume detach. 
Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1144.447881] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581006', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'name': 'volume-91d9331a-255e-4596-9535-7bf73c4b34d3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9c190abd-23ee-4e8e-8b91-9050847581d5', 'attached_at': '', 'detached_at': '', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'serial': '91d9331a-255e-4596-9535-7bf73c4b34d3'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1144.448879] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3c8afd-72d9-40ef-90a3-cca1d3a91506 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.485183] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8a5cb6-4c67-4ada-a27f-9bbbfa0bf462 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.496166] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f3fa14-3c6c-4cc8-aa06-a394c4967d48 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.520630] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.524340] env[68285]: DEBUG nova.network.neutron [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Successfully created port: dbe7fd71-a38e-450c-a4ef-497eaf455ff0 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1144.526922] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7181e595-d711-405c-a786-737657cfcf3f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.545425] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] The volume has not been displaced from its original location: [datastore1] volume-91d9331a-255e-4596-9535-7bf73c4b34d3/volume-91d9331a-255e-4596-9535-7bf73c4b34d3.vmdk. No consolidation needed. 
{{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1144.551054] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Reconfiguring VM instance instance-00000049 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1144.554048] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01c8d949-631a-4df5-a511-0d8cb3d1a8b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.576420] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892078, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.578488] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1144.578488] env[68285]: value = "task-2892082" [ 1144.578488] env[68285]: _type = "Task" [ 1144.578488] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.590819] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892082, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.701316] env[68285]: DEBUG nova.objects.instance [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lazy-loading 'flavor' on Instance uuid d4f20336-9c29-4aac-8c0d-f577749cd7d7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1144.855145] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a42d7fd-7561-45f9-b07b-ac074b5f8694 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.863621] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f7b7ec-492f-476f-9bb5-ec2ceec52465 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.900854] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8bd017c-203a-4754-aedf-d160ce33130c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.910026] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9b8d38-7529-4c80-997e-20d51c0835f2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.925732] env[68285]: DEBUG nova.compute.provider_tree [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1144.938601] env[68285]: DEBUG nova.compute.manager [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1144.968736] env[68285]: DEBUG nova.virt.hardware [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1144.968905] env[68285]: DEBUG nova.virt.hardware [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1144.969077] env[68285]: DEBUG nova.virt.hardware [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1144.969265] env[68285]: DEBUG nova.virt.hardware [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1144.969409] env[68285]: DEBUG nova.virt.hardware [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1144.969554] env[68285]: DEBUG nova.virt.hardware [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1144.969765] env[68285]: DEBUG nova.virt.hardware [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1144.969922] env[68285]: DEBUG nova.virt.hardware [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1144.970235] env[68285]: DEBUG nova.virt.hardware [None 
req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1144.970538] env[68285]: DEBUG nova.virt.hardware [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1144.970739] env[68285]: DEBUG nova.virt.hardware [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1144.971672] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d18d3f9-1841-4548-b10f-41d48cf0a909 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.981583] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec95be60-7a3f-4237-9592-c5f72f10b931 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.077174] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892078, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.473051} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.077495] env[68285]: INFO nova.virt.vmwareapi.ds_util [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_58f69169-30de-4bce-b3a1-2eabce5d16aa/OSTACK_IMG_58f69169-30de-4bce-b3a1-2eabce5d16aa.vmdk to [datastore1] devstack-image-cache_base/16df92d7-466a-491d-b247-71c140d9d824/16df92d7-466a-491d-b247-71c140d9d824.vmdk. 
[ 1145.077647] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Cleaning up location [datastore1] OSTACK_IMG_58f69169-30de-4bce-b3a1-2eabce5d16aa {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1145.077809] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_58f69169-30de-4bce-b3a1-2eabce5d16aa {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1145.078083] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-398ceab0-f244-404a-8aa3-bafc201eac9d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.088458] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892082, 'name': ReconfigVM_Task, 'duration_secs': 0.266309} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.089931] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Reconfigured VM instance instance-00000049 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1145.095581] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1145.095581] env[68285]: value = "task-2892083" [ 1145.095581] env[68285]: _type = "Task" [ 1145.095581] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.095929] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a99a1042-634b-4a2e-a29c-c9cb3b7b61b4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.115368] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892083, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.116951] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1145.116951] env[68285]: value = "task-2892084" [ 1145.116951] env[68285]: _type = "Task" [ 1145.116951] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.127860] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892084, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.430566] env[68285]: DEBUG nova.scheduler.client.report [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1145.617782] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892083, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109121} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.618361] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1145.618567] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/16df92d7-466a-491d-b247-71c140d9d824/16df92d7-466a-491d-b247-71c140d9d824.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.618824] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/16df92d7-466a-491d-b247-71c140d9d824/16df92d7-466a-491d-b247-71c140d9d824.vmdk to [datastore1] ce780600-5dc9-4a60-b54e-415cd1766ffb/ce780600-5dc9-4a60-b54e-415cd1766ffb.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1145.623114] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ccdc772-3d29-4edd-9ae0-f1c9c97558f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.631200] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892084, 'name': ReconfigVM_Task, 'duration_secs': 0.46146} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.632410] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581006', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'name': 'volume-91d9331a-255e-4596-9535-7bf73c4b34d3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9c190abd-23ee-4e8e-8b91-9050847581d5', 'attached_at': '', 'detached_at': '', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'serial': '91d9331a-255e-4596-9535-7bf73c4b34d3'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1145.634539] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1145.634539] env[68285]: value = "task-2892085" [ 1145.634539] env[68285]: _type = "Task" [ 1145.634539] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.643915] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892085, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.710857] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c3963f07-6a11-4acc-b4be-8f035143c698 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.278s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.934973] env[68285]: DEBUG oslo_concurrency.lockutils [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.011s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.937705] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.638s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.937943] env[68285]: DEBUG nova.objects.instance [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lazy-loading 'resources' on Instance uuid 8c299247-896d-4ff1-b73a-22a71ec972fd {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.959712] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 
tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.959712] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.959712] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.959712] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.959712] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.961492] env[68285]: INFO nova.scheduler.client.report [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Deleted allocations for instance ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf [ 1145.962328] env[68285]: INFO nova.compute.manager [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Terminating instance [ 1146.146212] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892085, 'name': CopyVirtualDisk_Task} progress is 9%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.213071] env[68285]: DEBUG nova.compute.manager [req-23c008ef-97d2-4cb2-868d-226867fdf191 req-308a4074-0650-44f7-8241-f49cce7ab426 service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Received event network-vif-plugged-dbe7fd71-a38e-450c-a4ef-497eaf455ff0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1146.213071] env[68285]: DEBUG oslo_concurrency.lockutils [req-23c008ef-97d2-4cb2-868d-226867fdf191 req-308a4074-0650-44f7-8241-f49cce7ab426 service nova] Acquiring lock "d1446290-95ce-4e87-85df-7cc69bb57ce7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.213526] env[68285]: DEBUG oslo_concurrency.lockutils [req-23c008ef-97d2-4cb2-868d-226867fdf191 req-308a4074-0650-44f7-8241-f49cce7ab426 service nova] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.213526] env[68285]: DEBUG oslo_concurrency.lockutils [req-23c008ef-97d2-4cb2-868d-226867fdf191 req-308a4074-0650-44f7-8241-f49cce7ab426 service nova] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.213650] env[68285]: DEBUG nova.compute.manager [req-23c008ef-97d2-4cb2-868d-226867fdf191 req-308a4074-0650-44f7-8241-f49cce7ab426 service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] No waiting events found dispatching network-vif-plugged-dbe7fd71-a38e-450c-a4ef-497eaf455ff0 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1146.215364] env[68285]: WARNING nova.compute.manager [req-23c008ef-97d2-4cb2-868d-226867fdf191 req-308a4074-0650-44f7-8241-f49cce7ab426 service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Received unexpected event network-vif-plugged-dbe7fd71-a38e-450c-a4ef-497eaf455ff0 for instance with vm_state building and task_state spawning. [ 1146.400457] env[68285]: DEBUG nova.network.neutron [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Successfully updated port: dbe7fd71-a38e-450c-a4ef-497eaf455ff0 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1146.474314] env[68285]: DEBUG oslo_concurrency.lockutils [None req-430cb891-220e-4ccf-9719-1c33b7e06026 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.255s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.478543] env[68285]: DEBUG nova.compute.manager [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1146.478543] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1146.478543] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21251f3f-bb0e-4da1-aaaf-87575aed9a0c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.491630] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1146.491966] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a268304-c612-4627-ae7c-16c3c775c82f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.500747] env[68285]: DEBUG oslo_vmware.api [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1146.500747] env[68285]: value = "task-2892086" [ 1146.500747] env[68285]: _type = "Task" [ 1146.500747] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.514949] env[68285]: DEBUG oslo_vmware.api [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892086, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.650926] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892085, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.689290] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1146.689625] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c061740-feb5-4dd3-823d-44121f96f660 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.701373] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1146.701373] env[68285]: value = "task-2892087" [ 1146.701373] env[68285]: _type = "Task" [ 1146.701373] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.714902] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1146.715260] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Volume detach. Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1146.715499] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581006', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'name': 'volume-91d9331a-255e-4596-9535-7bf73c4b34d3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9c190abd-23ee-4e8e-8b91-9050847581d5', 'attached_at': '', 'detached_at': '', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'serial': '91d9331a-255e-4596-9535-7bf73c4b34d3'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1146.716396] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49d17184-a1f5-4264-8553-9991a9fcaab3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.743267] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a97f14-c9fe-4d7f-b456-5e189fcd2fc8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.753815] env[68285]: WARNING nova.virt.vmwareapi.driver [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1146.754854] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1146.756031] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a840ddf-1722-425e-9bdb-1a6bafd61a43 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.765215] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Unregistering the VM {{(pid=68285) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1146.765481] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3ac69d9-f7dc-4646-ad35-ceb7b8fa10f9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.839685] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7d25f8-5432-4e78-9296-17921704441b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.847447] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1146.847611] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1146.847696] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleting the datastore file [datastore2] 9c190abd-23ee-4e8e-8b91-9050847581d5 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1146.850029] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ace5c6a-28ed-4f08-82aa-7bb8ee652525 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.853604] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b3bc76-6fa3-4e94-9e69-2a91c84fc3d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.889744] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc3532b-88bb-4221-a774-0d35efbaa988 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.892882] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1146.892882] env[68285]: value = "task-2892089" [ 1146.892882] env[68285]: _type = "Task" [ 1146.892882] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.900463] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0436fbc9-1f38-4da0-be8a-0e04b2f78b36 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.910421] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.910586] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1146.910707] env[68285]: DEBUG nova.network.neutron [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1146.912415] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892089, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.924326] env[68285]: DEBUG nova.compute.provider_tree [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1147.015412] env[68285]: DEBUG oslo_vmware.api [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892086, 'name': PowerOffVM_Task, 'duration_secs': 0.227395} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.015412] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1147.015412] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1147.015412] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3eeb2cd-c973-4c8c-838d-6bc906130ea2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.087745] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1147.088259] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1147.088535] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Deleting the datastore file [datastore2] d4f20336-9c29-4aac-8c0d-f577749cd7d7 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1147.088833] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7adffee3-9e7c-441b-adf5-506a9c9b5d90 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.098023] env[68285]: DEBUG oslo_vmware.api [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1147.098023] env[68285]: value = "task-2892091" [ 1147.098023] env[68285]: _type = "Task" [ 1147.098023] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.107591] env[68285]: DEBUG oslo_vmware.api [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892091, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.148585] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892085, 'name': CopyVirtualDisk_Task} progress is 49%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.299642] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Acquiring lock "08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.299895] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lock "08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.411287] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288574} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.411287] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1147.411287] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1147.411713] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1147.430696] env[68285]: DEBUG nova.scheduler.client.report [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1147.498473] env[68285]: DEBUG nova.network.neutron [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1147.613692] env[68285]: DEBUG oslo_vmware.api [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35161} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.614155] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1147.614475] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1147.614801] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1147.615133] env[68285]: INFO nova.compute.manager [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1147.615542] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1147.615883] env[68285]: DEBUG nova.compute.manager [-] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1147.616162] env[68285]: DEBUG nova.network.neutron [-] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1147.651689] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892085, 'name': CopyVirtualDisk_Task} progress is 69%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.694517] env[68285]: DEBUG nova.network.neutron [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance_info_cache with network_info: [{"id": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "address": "fa:16:3e:71:fc:5c", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbe7fd71-a3", "ovs_interfaceid": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.803012] env[68285]: DEBUG nova.compute.manager [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1147.920730] env[68285]: INFO nova.virt.block_device [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Booting with volume 91d9331a-255e-4596-9535-7bf73c4b34d3 at /dev/sdb [ 1147.938182] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.943370] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.648s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.943370] env[68285]: DEBUG nova.objects.instance [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lazy-loading 'resources' on Instance uuid f13ad5e7-341f-4475-b334-2144b0923e3b {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.973515] env[68285]: INFO nova.scheduler.client.report [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Deleted allocations for instance 8c299247-896d-4ff1-b73a-22a71ec972fd [ 1147.979239] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8d7a3add-12e4-4fbb-9a25-05afe66e158a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.997364] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887947b4-1555-412f-9d04-04b9ba8f19c2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.042465] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ef64a99-746a-4aa2-990c-951999719283 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.053315] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc94c3b-72f2-4128-9c8a-294bbd75519e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.093754] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe5f397-c3dc-4764-8af2-4df47e11d781 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.102290] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3d6422-3293-417b-9ca0-2f862a4ee0fd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1148.119067] env[68285]: DEBUG nova.virt.block_device [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Updating existing volume attachment record: e7c4eb6c-fbe0-4bde-900c-ae482cc19524 {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1148.151805] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892085, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.154987] env[68285]: DEBUG nova.compute.manager [req-c58c4ff0-5885-490d-8e10-1d707edb37b5 req-bcdd5262-dc5e-43ca-bc04-c735fce00166 service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Received event network-vif-deleted-73717dde-af77-47f8-896b-24153f94b949 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1148.155243] env[68285]: INFO nova.compute.manager [req-c58c4ff0-5885-490d-8e10-1d707edb37b5 req-bcdd5262-dc5e-43ca-bc04-c735fce00166 service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Neutron deleted interface 73717dde-af77-47f8-896b-24153f94b949; detaching it from the instance and deleting it from the info cache [ 1148.155519] env[68285]: DEBUG nova.network.neutron [req-c58c4ff0-5885-490d-8e10-1d707edb37b5 req-bcdd5262-dc5e-43ca-bc04-c735fce00166 service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.200517] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.200835] env[68285]: DEBUG nova.compute.manager [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Instance network_info: |[{"id": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "address": "fa:16:3e:71:fc:5c", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbe7fd71-a3", "ovs_interfaceid": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1148.201428] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:fc:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dbe7fd71-a38e-450c-a4ef-497eaf455ff0', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1148.215020] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Creating folder: Project (43491d0bdffc49eaaad084f3124cffcb). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1148.215020] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-79556a5e-1425-4fea-89d8-d72014fd1916 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.226754] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Created folder: Project (43491d0bdffc49eaaad084f3124cffcb) in parent group-v580775. [ 1148.227103] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Creating folder: Instances. Parent ref: group-v581009. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1148.227406] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60e52596-ae89-4877-be99-0c6d01e357ae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.240837] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Created folder: Instances in parent group-v581009. [ 1148.240837] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1148.241098] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1148.241199] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27a2bd48-9542-49b9-83c0-ba6240d3a0b3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.260723] env[68285]: DEBUG nova.compute.manager [req-d133232f-0b6a-4fd9-8708-d52c7f7901a2 req-e9aaf5ba-32cd-4442-9667-efe96e15ebc6 service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Received event network-changed-dbe7fd71-a38e-450c-a4ef-497eaf455ff0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1148.260965] env[68285]: DEBUG nova.compute.manager [req-d133232f-0b6a-4fd9-8708-d52c7f7901a2 req-e9aaf5ba-32cd-4442-9667-efe96e15ebc6 service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Refreshing instance network info cache due to event network-changed-dbe7fd71-a38e-450c-a4ef-497eaf455ff0. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1148.261258] env[68285]: DEBUG oslo_concurrency.lockutils [req-d133232f-0b6a-4fd9-8708-d52c7f7901a2 req-e9aaf5ba-32cd-4442-9667-efe96e15ebc6 service nova] Acquiring lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.261439] env[68285]: DEBUG oslo_concurrency.lockutils [req-d133232f-0b6a-4fd9-8708-d52c7f7901a2 req-e9aaf5ba-32cd-4442-9667-efe96e15ebc6 service nova] Acquired lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.261602] env[68285]: DEBUG nova.network.neutron [req-d133232f-0b6a-4fd9-8708-d52c7f7901a2 req-e9aaf5ba-32cd-4442-9667-efe96e15ebc6 service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Refreshing network info cache for port dbe7fd71-a38e-450c-a4ef-497eaf455ff0 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1148.269652] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1148.269652] env[68285]: value = "task-2892094" [ 1148.269652] env[68285]: _type = "Task" [ 1148.269652] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.285148] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892094, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.333947] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.488313] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ae7356b6-7da7-4f97-95e9-58c36e467b33 tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "8c299247-896d-4ff1-b73a-22a71ec972fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.425s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.636179] env[68285]: DEBUG nova.network.neutron [-] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.650534] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892085, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.735458} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.651708] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/16df92d7-466a-491d-b247-71c140d9d824/16df92d7-466a-491d-b247-71c140d9d824.vmdk to [datastore1] ce780600-5dc9-4a60-b54e-415cd1766ffb/ce780600-5dc9-4a60-b54e-415cd1766ffb.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1148.654229] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5a8a5a-89bd-41ad-aa8a-9d9d5ced8382 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.658681] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3e11b0a9-401d-4672-b8a9-ae1f48c5c2fd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.684715] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] ce780600-5dc9-4a60-b54e-415cd1766ffb/ce780600-5dc9-4a60-b54e-415cd1766ffb.vmdk or device None with type streamOptimized {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1148.688375] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83c84c7f-94ea-4215-a8b8-57f7f4fda6c2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.721485] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-b80b3faa-961c-45e1-93f1-6aa12dc59cc4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.737511] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1148.737511] env[68285]: value = "task-2892095" [ 1148.737511] env[68285]: _type = "Task" [ 1148.737511] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.753282] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892095, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.767479] env[68285]: DEBUG nova.compute.manager [req-c58c4ff0-5885-490d-8e10-1d707edb37b5 req-bcdd5262-dc5e-43ca-bc04-c735fce00166 service nova] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Detach interface failed, port_id=73717dde-af77-47f8-896b-24153f94b949, reason: Instance d4f20336-9c29-4aac-8c0d-f577749cd7d7 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1148.787765] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892094, 'name': CreateVM_Task, 'duration_secs': 0.468764} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.791202] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1148.792438] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.792617] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.793142] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1148.793263] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c554772a-1c8e-48b1-8c3e-fc8760b28d38 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.799038] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 
tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1148.799038] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525b8057-aac0-a02e-ba04-7c50431876c0" [ 1148.799038] env[68285]: _type = "Task" [ 1148.799038] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.808711] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525b8057-aac0-a02e-ba04-7c50431876c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.901426] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c174e507-b3d4-4f7e-b1f6-ad28eeae8072 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.910140] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53890ee-fdae-477c-8e08-621a590a6768 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.944454] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a72074-f4bc-48e2-8e0e-acc39b9dff6f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.953466] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2cc82f-413b-449e-a081-ac5845947242 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.969753] env[68285]: DEBUG nova.compute.provider_tree [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1149.050943] env[68285]: DEBUG nova.network.neutron [req-d133232f-0b6a-4fd9-8708-d52c7f7901a2 req-e9aaf5ba-32cd-4442-9667-efe96e15ebc6 service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updated VIF entry in instance network info cache for port dbe7fd71-a38e-450c-a4ef-497eaf455ff0. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1149.050943] env[68285]: DEBUG nova.network.neutron [req-d133232f-0b6a-4fd9-8708-d52c7f7901a2 req-e9aaf5ba-32cd-4442-9667-efe96e15ebc6 service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance_info_cache with network_info: [{"id": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "address": "fa:16:3e:71:fc:5c", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbe7fd71-a3", "ovs_interfaceid": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.141386] env[68285]: INFO nova.compute.manager [-] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Took 1.52 seconds to deallocate network for instance. [ 1149.218096] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "1a040977-b57e-4b67-b259-065b788141de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.218712] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "1a040977-b57e-4b67-b259-065b788141de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.218712] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "1a040977-b57e-4b67-b259-065b788141de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.218867] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "1a040977-b57e-4b67-b259-065b788141de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.219020] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "1a040977-b57e-4b67-b259-065b788141de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.221030] env[68285]: INFO nova.compute.manager [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Terminating instance [ 1149.249899] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892095, 'name': ReconfigVM_Task, 'duration_secs': 0.310802} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.250531] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Reconfigured VM instance instance-00000055 to attach disk [datastore1] ce780600-5dc9-4a60-b54e-415cd1766ffb/ce780600-5dc9-4a60-b54e-415cd1766ffb.vmdk or device None with type streamOptimized {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1149.251159] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f83a09d5-54de-44e1-9e06-48c34ac2d4d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.258867] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1149.258867] env[68285]: value = "task-2892096" [ 1149.258867] env[68285]: _type = "Task" [ 1149.258867] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.269034] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892096, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.311446] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525b8057-aac0-a02e-ba04-7c50431876c0, 'name': SearchDatastore_Task, 'duration_secs': 0.011212} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.311676] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.311942] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1149.312220] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.312393] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.312654] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1149.312947] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1e2eef5-5c9a-4b65-82c7-2624ff32994c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.322939] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1149.323144] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1149.324684] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcd8e148-824d-4c2e-a315-477ddc6b7b15 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.330796] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1149.330796] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]520ccc1f-60bc-39fc-5348-2f75c747c3f1" [ 1149.330796] env[68285]: _type = "Task" [ 1149.330796] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.339827] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520ccc1f-60bc-39fc-5348-2f75c747c3f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.473827] env[68285]: DEBUG nova.scheduler.client.report [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1149.553092] env[68285]: DEBUG oslo_concurrency.lockutils [req-d133232f-0b6a-4fd9-8708-d52c7f7901a2 req-e9aaf5ba-32cd-4442-9667-efe96e15ebc6 service nova] Releasing lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.647464] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.724585] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "refresh_cache-1a040977-b57e-4b67-b259-065b788141de" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.724950] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquired lock "refresh_cache-1a040977-b57e-4b67-b259-065b788141de" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.725035] env[68285]: DEBUG nova.network.neutron [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1149.770460] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892096, 'name': Rename_Task, 'duration_secs': 0.165676} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.770742] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1149.771310] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fdd29308-ab7e-4a0c-b547-42f5defbb2a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.778692] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1149.778692] env[68285]: value = "task-2892097" [ 1149.778692] env[68285]: _type = "Task" [ 1149.778692] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.789141] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892097, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.815224] env[68285]: DEBUG nova.virt.hardware [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1149.815224] env[68285]: DEBUG nova.virt.hardware [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1149.815224] env[68285]: DEBUG nova.virt.hardware [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1149.815224] env[68285]: DEBUG nova.virt.hardware [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1149.815224] env[68285]: DEBUG nova.virt.hardware [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1149.815224] env[68285]: DEBUG nova.virt.hardware [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1149.815224] env[68285]: DEBUG nova.virt.hardware [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1149.815561] env[68285]: DEBUG nova.virt.hardware [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1149.815561] 
env[68285]: DEBUG nova.virt.hardware [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1149.815727] env[68285]: DEBUG nova.virt.hardware [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1149.815872] env[68285]: DEBUG nova.virt.hardware [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1149.816836] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bfa729c-4799-4819-b1c4-275753620a84 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.837772] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30a1562-4797-4301-afa7-d0f8300aa371 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.847270] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520ccc1f-60bc-39fc-5348-2f75c747c3f1, 'name': SearchDatastore_Task, 'duration_secs': 0.010564} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.856110] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:af:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9d34554-5a11-451d-b371-8a0cdfc63de6', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1149.863465] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1149.863721] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-274e1f4e-2b95-4b3c-8cbf-1557db93d1e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.866321] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1149.866541] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8fb13fe-740b-4235-a2c6-36c9c2b0fe37 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.885868] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1149.885868] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52242eef-ace5-8940-1262-7f46c4f3d2ef" [ 1149.885868] env[68285]: _type = "Task" [ 1149.885868] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.887182] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1149.887182] env[68285]: value = "task-2892098" [ 1149.887182] env[68285]: _type = "Task" [ 1149.887182] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.898777] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52242eef-ace5-8940-1262-7f46c4f3d2ef, 'name': SearchDatastore_Task, 'duration_secs': 0.010515} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.901917] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.902230] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] d1446290-95ce-4e87-85df-7cc69bb57ce7/d1446290-95ce-4e87-85df-7cc69bb57ce7.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1149.902491] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892098, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.902664] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fdd7590b-8d75-410e-af5d-6d313273b490 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.909554] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1149.909554] env[68285]: value = "task-2892099" [ 1149.909554] env[68285]: _type = "Task" [ 1149.909554] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.920371] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892099, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.979467] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.981966] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.840s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.983724] env[68285]: INFO nova.compute.claims [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1150.003439] env[68285]: INFO nova.scheduler.client.report [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Deleted allocations for instance f13ad5e7-341f-4475-b334-2144b0923e3b [ 1150.245720] env[68285]: DEBUG nova.network.neutron [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1150.292033] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892097, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.298286] env[68285]: DEBUG nova.network.neutron [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.400611] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892098, 'name': CreateVM_Task, 'duration_secs': 0.341545} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.400611] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1150.401804] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.401804] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.401804] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1150.402059] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80cd878c-a563-4176-b3eb-b52d967e357f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.406904] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1150.406904] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ec2c3b-c9ec-825e-1208-5e1d26a49088" [ 1150.406904] env[68285]: _type = "Task" [ 1150.406904] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.418760] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892099, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.421584] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ec2c3b-c9ec-825e-1208-5e1d26a49088, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.514259] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28b8900b-4f39-4b25-ac8a-614e4e3d228f tempest-ServersWithSpecificFlavorTestJSON-2121202015 tempest-ServersWithSpecificFlavorTestJSON-2121202015-project-member] Lock "f13ad5e7-341f-4475-b334-2144b0923e3b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.828s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.790587] env[68285]: DEBUG oslo_vmware.api [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892097, 'name': PowerOnVM_Task, 'duration_secs': 0.644517} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.790929] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1150.791071] env[68285]: INFO nova.compute.manager [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Took 15.44 seconds to spawn the instance on the hypervisor. [ 1150.791267] env[68285]: DEBUG nova.compute.manager [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1150.792147] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23baee75-fa89-403b-a44d-dd8adb5aab06 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.802199] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Releasing lock "refresh_cache-1a040977-b57e-4b67-b259-065b788141de" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.802624] env[68285]: DEBUG nova.compute.manager [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1150.802821] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1150.803659] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faaf0029-9576-4fb3-af43-7ca6f0103e71 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.814129] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1150.814459] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd5bbe56-030e-478e-a9d4-247ff7520d99 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.823969] env[68285]: DEBUG oslo_vmware.api [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1150.823969] env[68285]: value = "task-2892100" [ 1150.823969] env[68285]: _type = "Task" [ 1150.823969] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.833853] env[68285]: DEBUG oslo_vmware.api [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892100, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.920759] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892099, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523121} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.924321] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] d1446290-95ce-4e87-85df-7cc69bb57ce7/d1446290-95ce-4e87-85df-7cc69bb57ce7.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1150.924545] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1150.924813] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ec2c3b-c9ec-825e-1208-5e1d26a49088, 'name': SearchDatastore_Task, 'duration_secs': 0.015065} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.925016] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02972d23-70bb-4052-ba89-12513ace53fd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.926875] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.927109] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1150.927340] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.927489] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.927661] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1150.927894] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9fd0d14a-5a4d-4229-a63d-6c80023e9f59 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.935679] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1150.935679] env[68285]: value = "task-2892101" [ 1150.935679] env[68285]: _type = "Task" [ 1150.935679] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.936775] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1150.936951] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1150.941081] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ecd9a85-c340-4744-9e18-cc5d496a56ae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.949132] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1150.949132] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52a0f4cd-e534-9ac7-56a8-5298dd2f27eb" [ 1150.949132] env[68285]: _type = "Task" [ 1150.949132] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.951622] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892101, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.960438] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a0f4cd-e534-9ac7-56a8-5298dd2f27eb, 'name': SearchDatastore_Task, 'duration_secs': 0.010543} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.961327] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8805b821-a744-4c0a-af30-5f01660c2133 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.966598] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1150.966598] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f580b8-ea9c-ccb4-e825-0396a7aa4278" [ 1150.966598] env[68285]: _type = "Task" [ 1150.966598] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.974613] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f580b8-ea9c-ccb4-e825-0396a7aa4278, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.253137] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e418287b-27e5-451a-ade4-de636c4ec095 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.261930] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5ddbe4-c11b-44d0-8973-6b071eefc351 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.293597] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66befbea-2955-4cae-91f5-152acf41527d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.302370] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743589ee-3494-4c27-9868-23861d106bc8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.319515] env[68285]: DEBUG nova.compute.provider_tree [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1151.322629] env[68285]: INFO nova.compute.manager [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Took 44.16 seconds to build instance. [ 1151.334499] env[68285]: DEBUG oslo_vmware.api [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892100, 'name': PowerOffVM_Task, 'duration_secs': 0.254791} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.334782] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1151.334946] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1151.335201] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f91b1218-30d5-4ae0-9c16-3b5f2c5b44e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.365347] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1151.365581] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1151.365770] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Deleting the datastore file [datastore1] 1a040977-b57e-4b67-b259-065b788141de {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1151.366060] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8bc28378-9457-4491-9990-95bd9afafa95 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.373543] env[68285]: DEBUG oslo_vmware.api [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for the task: (returnval){ [ 1151.373543] env[68285]: value = "task-2892103" [ 1151.373543] env[68285]: _type = "Task" [ 1151.373543] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.384295] env[68285]: DEBUG oslo_vmware.api [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892103, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.449064] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892101, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.21881} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.449357] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1151.450172] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50906ab4-b0ff-49e9-8885-7681e292fa57 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.473796] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] d1446290-95ce-4e87-85df-7cc69bb57ce7/d1446290-95ce-4e87-85df-7cc69bb57ce7.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1151.474540] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5a50604-b5e2-41c2-8f14-1d55d8becd47 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.500867] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f580b8-ea9c-ccb4-e825-0396a7aa4278, 'name': SearchDatastore_Task, 'duration_secs': 0.01197} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.500867] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.500867] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 9c190abd-23ee-4e8e-8b91-9050847581d5/9c190abd-23ee-4e8e-8b91-9050847581d5.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1151.500867] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1151.500867] env[68285]: value = "task-2892104" [ 1151.500867] env[68285]: _type = "Task" [ 1151.500867] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.500867] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51aa995a-7ae2-4043-b5fa-aa74483971e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.512052] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892104, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.515378] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1151.515378] env[68285]: value = "task-2892105" [ 1151.515378] env[68285]: _type = "Task" [ 1151.515378] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.524036] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892105, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.824437] env[68285]: DEBUG nova.scheduler.client.report [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1151.833266] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5363b42f-3850-4455-9e7a-8c9d31d6ebe0 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "ce780600-5dc9-4a60-b54e-415cd1766ffb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.672s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.886497] env[68285]: DEBUG oslo_vmware.api [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Task: {'id': task-2892103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098376} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.886898] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1151.887200] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1151.887460] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1151.887664] env[68285]: INFO nova.compute.manager [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] [instance: 1a040977-b57e-4b67-b259-065b788141de] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1151.887896] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1151.888107] env[68285]: DEBUG nova.compute.manager [-] [instance: 1a040977-b57e-4b67-b259-065b788141de] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1151.888710] env[68285]: DEBUG nova.network.neutron [-] [instance: 1a040977-b57e-4b67-b259-065b788141de] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1151.911263] env[68285]: DEBUG nova.network.neutron [-] [instance: 1a040977-b57e-4b67-b259-065b788141de] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1152.013595] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892104, 'name': ReconfigVM_Task, 'duration_secs': 0.327163} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.013998] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Reconfigured VM instance instance-00000056 to attach disk [datastore1] d1446290-95ce-4e87-85df-7cc69bb57ce7/d1446290-95ce-4e87-85df-7cc69bb57ce7.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1152.017434] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d4f1975-1548-4b42-bd81-d557c5749980 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.024675] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892105, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48931} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.026238] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 9c190abd-23ee-4e8e-8b91-9050847581d5/9c190abd-23ee-4e8e-8b91-9050847581d5.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1152.027098] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1152.027098] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1152.027098] env[68285]: value = "task-2892106" [ 1152.027098] env[68285]: _type = "Task" [ 1152.027098] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.027321] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8427ce6-1641-48fe-91f3-d0a5b525dea4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.038809] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892106, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.040503] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1152.040503] env[68285]: value = "task-2892107" [ 1152.040503] env[68285]: _type = "Task" [ 1152.040503] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.049275] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892107, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.334861] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.353s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.335483] env[68285]: DEBUG nova.compute.manager [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1152.339008] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.808s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.340461] env[68285]: INFO nova.compute.claims [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1152.417157] env[68285]: DEBUG nova.network.neutron [-] [instance: 1a040977-b57e-4b67-b259-065b788141de] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.539356] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892106, 'name': Rename_Task, 'duration_secs': 0.162071} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.539626] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1152.539879] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d535f69c-186a-415a-87cb-31a01326a2a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.550410] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892107, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083656} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.551634] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1152.551969] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1152.551969] env[68285]: value = "task-2892108" [ 1152.551969] env[68285]: _type = "Task" [ 1152.551969] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.552681] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2fe01c3-5235-4257-a961-c8a2024380bd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.579019] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 9c190abd-23ee-4e8e-8b91-9050847581d5/9c190abd-23ee-4e8e-8b91-9050847581d5.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1152.582538] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd1050b4-2a4d-4805-8373-78c3804d6123 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.597016] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892108, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.606303] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1152.606303] env[68285]: value = "task-2892109" [ 1152.606303] env[68285]: _type = "Task" [ 1152.606303] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.614156] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892109, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.659858] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "ce780600-5dc9-4a60-b54e-415cd1766ffb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.660248] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "ce780600-5dc9-4a60-b54e-415cd1766ffb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.660468] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "ce780600-5dc9-4a60-b54e-415cd1766ffb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.660681] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "ce780600-5dc9-4a60-b54e-415cd1766ffb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.660855] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "ce780600-5dc9-4a60-b54e-415cd1766ffb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.663121] env[68285]: INFO nova.compute.manager [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Terminating instance [ 1152.849393] env[68285]: DEBUG nova.compute.utils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1152.851089] env[68285]: DEBUG nova.compute.manager [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1152.851347] env[68285]: DEBUG nova.network.neutron [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1152.919658] env[68285]: DEBUG nova.policy [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '412a1e54bbf9406cb6a1e3ca934440de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68bf67c591824d01aa4756c888dba684', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1152.921923] env[68285]: INFO nova.compute.manager [-] [instance: 1a040977-b57e-4b67-b259-065b788141de] Took 1.03 seconds to deallocate network for instance. [ 1153.067875] env[68285]: DEBUG oslo_vmware.api [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892108, 'name': PowerOnVM_Task, 'duration_secs': 0.484734} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.068439] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1153.070203] env[68285]: INFO nova.compute.manager [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Took 8.13 seconds to spawn the instance on the hypervisor. [ 1153.070203] env[68285]: DEBUG nova.compute.manager [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1153.070203] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b97b692-f812-4f22-8478-d71b9462bbd5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.116344] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892109, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.167564] env[68285]: DEBUG nova.compute.manager [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1153.167850] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1153.169164] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379acee3-0d81-41f4-bef5-08b97abc2626 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.179547] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1153.179849] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b88286d7-60f7-462c-8c20-decbe092c201 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.191353] env[68285]: DEBUG oslo_vmware.api [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1153.191353] env[68285]: value = "task-2892110" [ 1153.191353] env[68285]: _type = "Task" [ 1153.191353] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.202494] env[68285]: DEBUG oslo_vmware.api [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892110, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.356449] env[68285]: DEBUG nova.compute.manager [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1153.430902] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.487328] env[68285]: DEBUG nova.network.neutron [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Successfully created port: c698959a-27f2-4b51-ab9c-83564bfc6e47 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1153.589662] env[68285]: INFO nova.compute.manager [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Took 29.63 seconds to build instance. [ 1153.620906] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892109, 'name': ReconfigVM_Task, 'duration_secs': 0.581366} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.621207] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 9c190abd-23ee-4e8e-8b91-9050847581d5/9c190abd-23ee-4e8e-8b91-9050847581d5.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1153.622621] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'size': 0, 'encryption_options': None, 'boot_index': 0, 'disk_bus': None, 'encryption_secret_uuid': None, 'encrypted': False, 'device_type': 'disk', 'guest_format': None, 'encryption_format': None, 'image_id': 'ce84ab4c-9913-42dc-b839-714ad2184867'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'boot_index': None, 'disk_bus': None, 'guest_format': None, 'device_type': None, 'attachment_id': 'e7c4eb6c-fbe0-4bde-900c-ae482cc19524', 'mount_device': '/dev/sdb', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581006', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'name': 'volume-91d9331a-255e-4596-9535-7bf73c4b34d3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9c190abd-23ee-4e8e-8b91-9050847581d5', 'attached_at': '', 'detached_at': '', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'serial': '91d9331a-255e-4596-9535-7bf73c4b34d3'}, 'volume_type': None}], 'swap': None} {{(pid=68285) spawn 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1153.622826] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Volume attach. Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1153.623080] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581006', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'name': 'volume-91d9331a-255e-4596-9535-7bf73c4b34d3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9c190abd-23ee-4e8e-8b91-9050847581d5', 'attached_at': '', 'detached_at': '', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'serial': '91d9331a-255e-4596-9535-7bf73c4b34d3'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1153.623918] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e288f860-2d32-42e3-92cc-0391e3a5ebc9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.647758] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2e4afc-b0f7-450e-a5cc-380a088b32d9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.675780] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] volume-91d9331a-255e-4596-9535-7bf73c4b34d3/volume-91d9331a-255e-4596-9535-7bf73c4b34d3.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1153.676962] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71277d9e-6ae0-49ce-8e45-ec5f2d6d509e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.690655] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba83b31a-0042-4d77-9d31-e08454e09960 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.704136] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4d0a87-2763-4711-90d3-78901bad6e02 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.707646] env[68285]: DEBUG oslo_vmware.api [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892110, 'name': PowerOffVM_Task, 'duration_secs': 0.209639} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.707926] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1153.707926] env[68285]: value = "task-2892111" [ 1153.707926] env[68285]: _type = "Task" [ 1153.707926] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.708179] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1153.708342] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1153.708900] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7c0e0cd-dcae-4dda-8f25-1139774fbf1e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.738230] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1153.739428] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1153.741420] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29393cef-6bbb-411c-bd1b-546f9cb290e6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.747749] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892111, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.754658] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c435d6d-ae58-43f9-ac69-98a6debe98bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.771570] env[68285]: DEBUG nova.compute.provider_tree [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.810111] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1153.810414] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1153.810775] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleting the datastore file [datastore1] ce780600-5dc9-4a60-b54e-415cd1766ffb {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1153.810886] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3fab0565-680d-438d-9bfc-4dbf56c86d37 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.818340] env[68285]: DEBUG oslo_vmware.api [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1153.818340] env[68285]: value = "task-2892113" [ 1153.818340] env[68285]: _type = "Task" [ 1153.818340] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.830011] env[68285]: DEBUG oslo_vmware.api [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892113, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.091823] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37c4818e-ffb0-4a11-a50e-34ab69411831 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.145s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.222974] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892111, 'name': ReconfigVM_Task, 'duration_secs': 0.339154} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.223285] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Reconfigured VM instance instance-00000049 to attach disk [datastore1] volume-91d9331a-255e-4596-9535-7bf73c4b34d3/volume-91d9331a-255e-4596-9535-7bf73c4b34d3.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1154.228122] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-daef3faa-b1a6-4b4e-951e-44749662ba9a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.244282] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1154.244282] env[68285]: value = "task-2892114" [ 1154.244282] env[68285]: _type = "Task" [ 1154.244282] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.256871] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892114, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.259067] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.259067] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.259067] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.259067] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.259067] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.259067] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.259067] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68285) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1154.259067] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.274226] env[68285]: DEBUG nova.scheduler.client.report [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1154.333851] env[68285]: DEBUG oslo_vmware.api [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892113, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140724} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.334846] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1154.335055] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1154.335230] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1154.335398] env[68285]: INFO nova.compute.manager [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1154.335645] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1154.335838] env[68285]: DEBUG nova.compute.manager [-] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1154.335943] env[68285]: DEBUG nova.network.neutron [-] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1154.369658] env[68285]: DEBUG nova.compute.manager [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1154.402102] env[68285]: DEBUG nova.virt.hardware [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1154.402358] env[68285]: DEBUG nova.virt.hardware [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1154.402519] env[68285]: DEBUG nova.virt.hardware [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1154.405456] env[68285]: DEBUG nova.virt.hardware [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1154.405456] env[68285]: DEBUG nova.virt.hardware [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1154.405456] env[68285]: DEBUG nova.virt.hardware [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1154.405456] env[68285]: DEBUG nova.virt.hardware [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1154.405456] env[68285]: DEBUG nova.virt.hardware [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1154.405456] 
env[68285]: DEBUG nova.virt.hardware [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1154.405456] env[68285]: DEBUG nova.virt.hardware [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1154.405456] env[68285]: DEBUG nova.virt.hardware [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1154.405456] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c760a7f3-a635-49ab-b8eb-890fbd6f47a4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.415235] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc1a7f9-6da4-4086-a0dd-154ef4c769cf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.655861] env[68285]: DEBUG nova.compute.manager [req-e131e38a-3fab-40c6-9afe-d9a517af5514 req-efea0d52-a1c3-40b6-99d4-32e37c0af1cf service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Received event network-changed-dbe7fd71-a38e-450c-a4ef-497eaf455ff0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1154.656079] env[68285]: DEBUG nova.compute.manager [req-e131e38a-3fab-40c6-9afe-d9a517af5514 req-efea0d52-a1c3-40b6-99d4-32e37c0af1cf service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Refreshing instance network info cache due to event network-changed-dbe7fd71-a38e-450c-a4ef-497eaf455ff0. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1154.656409] env[68285]: DEBUG oslo_concurrency.lockutils [req-e131e38a-3fab-40c6-9afe-d9a517af5514 req-efea0d52-a1c3-40b6-99d4-32e37c0af1cf service nova] Acquiring lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.656454] env[68285]: DEBUG oslo_concurrency.lockutils [req-e131e38a-3fab-40c6-9afe-d9a517af5514 req-efea0d52-a1c3-40b6-99d4-32e37c0af1cf service nova] Acquired lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.656626] env[68285]: DEBUG nova.network.neutron [req-e131e38a-3fab-40c6-9afe-d9a517af5514 req-efea0d52-a1c3-40b6-99d4-32e37c0af1cf service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Refreshing network info cache for port dbe7fd71-a38e-450c-a4ef-497eaf455ff0 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1154.732927] env[68285]: DEBUG nova.compute.manager [req-4c45c55b-3720-4d2f-9f4d-e1a4c8fc3ecf req-3710d11a-f533-4611-b388-3ce0034cf5e2 service nova] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Received event network-vif-deleted-a4826873-4993-493d-8964-49f7a6cd44f9 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1154.734058] env[68285]: INFO nova.compute.manager [req-4c45c55b-3720-4d2f-9f4d-e1a4c8fc3ecf req-3710d11a-f533-4611-b388-3ce0034cf5e2 service nova] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Neutron deleted interface a4826873-4993-493d-8964-49f7a6cd44f9; detaching it from the instance and deleting it from the info cache [ 1154.734058] env[68285]: DEBUG nova.network.neutron [req-4c45c55b-3720-4d2f-9f4d-e1a4c8fc3ecf req-3710d11a-f533-4611-b388-3ce0034cf5e2 service nova] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.754440] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892114, 'name': ReconfigVM_Task, 'duration_secs': 0.170096} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.754733] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581006', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'name': 'volume-91d9331a-255e-4596-9535-7bf73c4b34d3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9c190abd-23ee-4e8e-8b91-9050847581d5', 'attached_at': '', 'detached_at': '', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'serial': '91d9331a-255e-4596-9535-7bf73c4b34d3'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1154.755530] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8969cd34-71ec-4465-b3b2-50eca9a8bfd2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.761637] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.764726] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1154.764726] env[68285]: value = "task-2892115" [ 1154.764726] env[68285]: _type = "Task" [ 1154.764726] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.774360] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892115, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.780191] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.441s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.780740] env[68285]: DEBUG nova.compute.manager [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1154.783481] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.762s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.785104] env[68285]: INFO nova.compute.claims [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1155.220638] env[68285]: DEBUG nova.network.neutron [-] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.237989] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f706c0e-b83b-4e02-9409-6c7a8e6b19e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.249012] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d71884a-735f-4a27-bdf2-7393e0c3b916 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.275754] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892115, 'name': Rename_Task, 'duration_secs': 0.167548} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.287702] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1155.288767] env[68285]: DEBUG nova.compute.manager [req-4c45c55b-3720-4d2f-9f4d-e1a4c8fc3ecf req-3710d11a-f533-4611-b388-3ce0034cf5e2 service nova] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Detach interface failed, port_id=a4826873-4993-493d-8964-49f7a6cd44f9, reason: Instance ce780600-5dc9-4a60-b54e-415cd1766ffb could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1155.290073] env[68285]: DEBUG nova.network.neutron [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Successfully updated port: c698959a-27f2-4b51-ab9c-83564bfc6e47 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1155.291435] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bbdf052e-4548-4e3a-bc41-180998bb966c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.301443] env[68285]: DEBUG nova.compute.utils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1155.304742] env[68285]: DEBUG nova.compute.manager [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1155.305130] env[68285]: DEBUG nova.network.neutron [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1155.315581] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1155.315581] env[68285]: value = "task-2892116" [ 1155.315581] env[68285]: _type = "Task" [ 1155.315581] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.331508] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892116, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.401243] env[68285]: DEBUG nova.policy [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '412a1e54bbf9406cb6a1e3ca934440de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68bf67c591824d01aa4756c888dba684', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1155.727244] env[68285]: INFO nova.compute.manager [-] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Took 1.39 seconds to deallocate network for instance. 
[ 1155.753373] env[68285]: DEBUG nova.network.neutron [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Successfully created port: ea845a8a-8eb0-4821-aef2-d5e99dd606ec {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1155.804675] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "refresh_cache-589d1560-9269-4de2-bd79-454ebdaa40d4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.804855] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquired lock "refresh_cache-589d1560-9269-4de2-bd79-454ebdaa40d4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.805151] env[68285]: DEBUG nova.network.neutron [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1155.811475] env[68285]: DEBUG nova.compute.manager [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1155.831685] env[68285]: DEBUG oslo_vmware.api [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892116, 'name': PowerOnVM_Task, 'duration_secs': 0.508922} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.832043] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1155.832266] env[68285]: DEBUG nova.compute.manager [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1155.833098] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e990b4-cb57-47f1-a9c7-20e935a8ec8d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.859859] env[68285]: DEBUG nova.network.neutron [req-e131e38a-3fab-40c6-9afe-d9a517af5514 req-efea0d52-a1c3-40b6-99d4-32e37c0af1cf service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updated VIF entry in instance network info cache for port dbe7fd71-a38e-450c-a4ef-497eaf455ff0. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1155.859859] env[68285]: DEBUG nova.network.neutron [req-e131e38a-3fab-40c6-9afe-d9a517af5514 req-efea0d52-a1c3-40b6-99d4-32e37c0af1cf service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance_info_cache with network_info: [{"id": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "address": "fa:16:3e:71:fc:5c", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbe7fd71-a3", "ovs_interfaceid": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.125345] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4c9b2b-3014-4871-8074-fb8f004a8349 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.132765] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e704781-bcc9-4a52-8d71-dad999133f20 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.166451] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6186c2fe-fb7e-4485-a098-89490496ca2d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.175294] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a632e2d-3c73-4c93-a0bd-7d838b25a3ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.190518] env[68285]: DEBUG nova.compute.provider_tree [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.234647] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.360938] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.362540] env[68285]: DEBUG nova.network.neutron [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1156.364590] env[68285]: DEBUG oslo_concurrency.lockutils [req-e131e38a-3fab-40c6-9afe-d9a517af5514 req-efea0d52-a1c3-40b6-99d4-32e37c0af1cf service nova] Releasing lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.637381] env[68285]: DEBUG nova.network.neutron [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Updating instance_info_cache with network_info: [{"id": "c698959a-27f2-4b51-ab9c-83564bfc6e47", "address": "fa:16:3e:6a:51:e4", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.39", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc698959a-27", "ovs_interfaceid": "c698959a-27f2-4b51-ab9c-83564bfc6e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.677928] env[68285]: DEBUG nova.compute.manager [req-3445aa47-e448-42c5-9623-a5646d9de59c req-88f980a0-8108-4a2b-bb8d-4852a6ed9214 service nova] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Received event network-vif-plugged-c698959a-27f2-4b51-ab9c-83564bfc6e47 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1156.678137] env[68285]: DEBUG oslo_concurrency.lockutils [req-3445aa47-e448-42c5-9623-a5646d9de59c req-88f980a0-8108-4a2b-bb8d-4852a6ed9214 service nova] Acquiring lock "589d1560-9269-4de2-bd79-454ebdaa40d4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.678350] env[68285]: DEBUG oslo_concurrency.lockutils [req-3445aa47-e448-42c5-9623-a5646d9de59c req-88f980a0-8108-4a2b-bb8d-4852a6ed9214 service nova] Lock "589d1560-9269-4de2-bd79-454ebdaa40d4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.678516] env[68285]: DEBUG oslo_concurrency.lockutils [req-3445aa47-e448-42c5-9623-a5646d9de59c req-88f980a0-8108-4a2b-bb8d-4852a6ed9214 service nova] Lock "589d1560-9269-4de2-bd79-454ebdaa40d4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.678681] 
env[68285]: DEBUG nova.compute.manager [req-3445aa47-e448-42c5-9623-a5646d9de59c req-88f980a0-8108-4a2b-bb8d-4852a6ed9214 service nova] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] No waiting events found dispatching network-vif-plugged-c698959a-27f2-4b51-ab9c-83564bfc6e47 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1156.679017] env[68285]: WARNING nova.compute.manager [req-3445aa47-e448-42c5-9623-a5646d9de59c req-88f980a0-8108-4a2b-bb8d-4852a6ed9214 service nova] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Received unexpected event network-vif-plugged-c698959a-27f2-4b51-ab9c-83564bfc6e47 for instance with vm_state building and task_state spawning. [ 1156.679199] env[68285]: DEBUG nova.compute.manager [req-3445aa47-e448-42c5-9623-a5646d9de59c req-88f980a0-8108-4a2b-bb8d-4852a6ed9214 service nova] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Received event network-changed-c698959a-27f2-4b51-ab9c-83564bfc6e47 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1156.679354] env[68285]: DEBUG nova.compute.manager [req-3445aa47-e448-42c5-9623-a5646d9de59c req-88f980a0-8108-4a2b-bb8d-4852a6ed9214 service nova] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Refreshing instance network info cache due to event network-changed-c698959a-27f2-4b51-ab9c-83564bfc6e47. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1156.679598] env[68285]: DEBUG oslo_concurrency.lockutils [req-3445aa47-e448-42c5-9623-a5646d9de59c req-88f980a0-8108-4a2b-bb8d-4852a6ed9214 service nova] Acquiring lock "refresh_cache-589d1560-9269-4de2-bd79-454ebdaa40d4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.693558] env[68285]: DEBUG nova.scheduler.client.report [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1156.822945] env[68285]: DEBUG nova.compute.manager [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1156.858234] env[68285]: DEBUG nova.virt.hardware [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1156.858234] env[68285]: DEBUG nova.virt.hardware [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1156.858234] env[68285]: DEBUG nova.virt.hardware [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1156.858234] env[68285]: DEBUG nova.virt.hardware [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1156.858234] env[68285]: DEBUG nova.virt.hardware [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1156.858234] env[68285]: DEBUG nova.virt.hardware [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1156.858234] env[68285]: DEBUG nova.virt.hardware [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1156.858234] env[68285]: DEBUG nova.virt.hardware [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1156.858234] 
env[68285]: DEBUG nova.virt.hardware [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1156.858234] env[68285]: DEBUG nova.virt.hardware [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1156.858964] env[68285]: DEBUG nova.virt.hardware [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1156.859935] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2a0347-4f0a-4c4e-a955-05c5d7718f40 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.868869] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c75127-7902-4801-9a30-0f28b85ee704 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.141706] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Releasing lock "refresh_cache-589d1560-9269-4de2-bd79-454ebdaa40d4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1157.142069] env[68285]: DEBUG nova.compute.manager [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Instance network_info: |[{"id": "c698959a-27f2-4b51-ab9c-83564bfc6e47", "address": "fa:16:3e:6a:51:e4", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.39", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc698959a-27", "ovs_interfaceid": "c698959a-27f2-4b51-ab9c-83564bfc6e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1157.143863] env[68285]: DEBUG oslo_concurrency.lockutils 
[req-3445aa47-e448-42c5-9623-a5646d9de59c req-88f980a0-8108-4a2b-bb8d-4852a6ed9214 service nova] Acquired lock "refresh_cache-589d1560-9269-4de2-bd79-454ebdaa40d4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.143863] env[68285]: DEBUG nova.network.neutron [req-3445aa47-e448-42c5-9623-a5646d9de59c req-88f980a0-8108-4a2b-bb8d-4852a6ed9214 service nova] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Refreshing network info cache for port c698959a-27f2-4b51-ab9c-83564bfc6e47 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1157.143863] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:51:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c698959a-27f2-4b51-ab9c-83564bfc6e47', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1157.153438] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Creating folder: Project (68bf67c591824d01aa4756c888dba684). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1157.154700] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-018b5e90-2359-40c1-bb7e-b0cb92e13727 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.168045] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Created folder: Project (68bf67c591824d01aa4756c888dba684) in parent group-v580775. [ 1157.168249] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Creating folder: Instances. Parent ref: group-v581013. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1157.168506] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2983686c-fe80-44bb-8d87-30bc2f43a392 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.181112] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Created folder: Instances in parent group-v581013. [ 1157.181112] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1157.181112] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1157.181112] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-586ccb17-d3e5-49c7-bda9-07f04a088d9b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.198450] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.199193] env[68285]: DEBUG nova.compute.manager [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1157.203894] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.273s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.203894] env[68285]: DEBUG nova.objects.instance [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lazy-loading 'resources' on Instance uuid 15fd3159-0fff-461d-96ce-f8cfc04eff32 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1157.210105] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1157.210105] env[68285]: value = "task-2892119" [ 1157.210105] env[68285]: _type = "Task" [ 1157.210105] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.221721] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892119, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.235584] env[68285]: DEBUG nova.network.neutron [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Successfully updated port: ea845a8a-8eb0-4821-aef2-d5e99dd606ec {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1157.704962] env[68285]: DEBUG nova.compute.utils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1157.706422] env[68285]: DEBUG nova.compute.manager [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1157.706587] env[68285]: DEBUG nova.network.neutron [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1157.722288] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892119, 'name': CreateVM_Task, 'duration_secs': 0.387609} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.722288] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1157.723063] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.723145] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.723455] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1157.723720] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-475026f2-e2bd-4486-a14f-a3500525c693 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.732805] env[68285]: 
DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1157.732805] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524801a1-9fbb-8f24-f2af-706db4882524" [ 1157.732805] env[68285]: _type = "Task" [ 1157.732805] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.738601] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "refresh_cache-8fd23cb4-45da-4bd9-a258-845eb3f6a1dc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.738694] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquired lock "refresh_cache-8fd23cb4-45da-4bd9-a258-845eb3f6a1dc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.738839] env[68285]: DEBUG nova.network.neutron [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1157.745566] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524801a1-9fbb-8f24-f2af-706db4882524, 'name': SearchDatastore_Task, 'duration_secs': 0.01413} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.748257] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1157.748514] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1157.748748] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.748894] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.749125] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1157.749539] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d03c1fc-02f5-4c29-9ef2-52846401c00f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.759314] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1157.759557] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1157.760283] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93667279-631f-4732-a8f7-9236c7ef652a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.767749] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1157.767749] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]522a15eb-5687-50b2-060b-5f452158e73d" [ 1157.767749] env[68285]: _type = "Task" [ 1157.767749] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.772411] env[68285]: DEBUG nova.policy [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fee422406a774be7830837baa9743f0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7106da1f6bcb4d0cb3dcad984b3adb33', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1157.777549] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522a15eb-5687-50b2-060b-5f452158e73d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.921664] env[68285]: DEBUG nova.network.neutron [req-3445aa47-e448-42c5-9623-a5646d9de59c req-88f980a0-8108-4a2b-bb8d-4852a6ed9214 service nova] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Updated VIF entry in instance network info cache for port c698959a-27f2-4b51-ab9c-83564bfc6e47. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1157.921664] env[68285]: DEBUG nova.network.neutron [req-3445aa47-e448-42c5-9623-a5646d9de59c req-88f980a0-8108-4a2b-bb8d-4852a6ed9214 service nova] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Updating instance_info_cache with network_info: [{"id": "c698959a-27f2-4b51-ab9c-83564bfc6e47", "address": "fa:16:3e:6a:51:e4", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.39", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc698959a-27", "ovs_interfaceid": "c698959a-27f2-4b51-ab9c-83564bfc6e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.023018] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Acquiring lock "9175fd25-a00c-4a2c-b779-56e6541dcaa1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.023018] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Lock "9175fd25-a00c-4a2c-b779-56e6541dcaa1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.045433] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6866b02-9b16-46ea-920d-eb09e4f29c4d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.054455] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a99db38-7c34-40c1-ad56-61f2f46733e8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.088605] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04071422-c7c6-4a32-87a0-244119bd0d30 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.096997] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e534bc-3d74-419c-adc9-b30d581a9da1 {{(pid=68285) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.113351] env[68285]: DEBUG nova.compute.provider_tree [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1158.212810] env[68285]: DEBUG nova.compute.manager [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1158.239580] env[68285]: DEBUG nova.network.neutron [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Successfully created port: 10900535-c864-4616-a243-0798b3cdb70a {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1158.285021] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522a15eb-5687-50b2-060b-5f452158e73d, 'name': SearchDatastore_Task, 'duration_secs': 0.010879} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.285021] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ad9e44d-9ff7-470c-b998-32225a1ea858 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.293138] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1158.293138] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525d11e6-c760-0dac-00e2-68c4708a6b7d" [ 1158.293138] env[68285]: _type = "Task" [ 1158.293138] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.299171] env[68285]: DEBUG nova.network.neutron [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1158.306748] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525d11e6-c760-0dac-00e2-68c4708a6b7d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.426045] env[68285]: DEBUG oslo_concurrency.lockutils [req-3445aa47-e448-42c5-9623-a5646d9de59c req-88f980a0-8108-4a2b-bb8d-4852a6ed9214 service nova] Releasing lock "refresh_cache-589d1560-9269-4de2-bd79-454ebdaa40d4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.460924] env[68285]: DEBUG nova.network.neutron [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Updating instance_info_cache with network_info: [{"id": "ea845a8a-8eb0-4821-aef2-d5e99dd606ec", "address": "fa:16:3e:53:ac:f5", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea845a8a-8e", "ovs_interfaceid": "ea845a8a-8eb0-4821-aef2-d5e99dd606ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.532125] env[68285]: DEBUG nova.compute.manager [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1158.616776] env[68285]: DEBUG nova.scheduler.client.report [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1158.771161] env[68285]: DEBUG nova.compute.manager [req-dd214b0d-46a1-4926-9936-52201d47b90a req-3d9fb517-3667-4603-98b6-49ce87cdfc1a service nova] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Received event network-vif-plugged-ea845a8a-8eb0-4821-aef2-d5e99dd606ec {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1158.771431] env[68285]: DEBUG oslo_concurrency.lockutils [req-dd214b0d-46a1-4926-9936-52201d47b90a req-3d9fb517-3667-4603-98b6-49ce87cdfc1a service nova] Acquiring lock "8fd23cb4-45da-4bd9-a258-845eb3f6a1dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.771662] env[68285]: DEBUG oslo_concurrency.lockutils [req-dd214b0d-46a1-4926-9936-52201d47b90a req-3d9fb517-3667-4603-98b6-49ce87cdfc1a service nova] Lock "8fd23cb4-45da-4bd9-a258-845eb3f6a1dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.771836] env[68285]: DEBUG oslo_concurrency.lockutils [req-dd214b0d-46a1-4926-9936-52201d47b90a req-3d9fb517-3667-4603-98b6-49ce87cdfc1a service nova] Lock "8fd23cb4-45da-4bd9-a258-845eb3f6a1dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.772015] env[68285]: DEBUG nova.compute.manager [req-dd214b0d-46a1-4926-9936-52201d47b90a req-3d9fb517-3667-4603-98b6-49ce87cdfc1a service nova] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] No waiting events found dispatching network-vif-plugged-ea845a8a-8eb0-4821-aef2-d5e99dd606ec {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1158.774160] env[68285]: WARNING nova.compute.manager [req-dd214b0d-46a1-4926-9936-52201d47b90a req-3d9fb517-3667-4603-98b6-49ce87cdfc1a service nova] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Received unexpected event network-vif-plugged-ea845a8a-8eb0-4821-aef2-d5e99dd606ec for instance with vm_state building and task_state spawning. 
[ 1158.774373] env[68285]: DEBUG nova.compute.manager [req-dd214b0d-46a1-4926-9936-52201d47b90a req-3d9fb517-3667-4603-98b6-49ce87cdfc1a service nova] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Received event network-changed-ea845a8a-8eb0-4821-aef2-d5e99dd606ec {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1158.774584] env[68285]: DEBUG nova.compute.manager [req-dd214b0d-46a1-4926-9936-52201d47b90a req-3d9fb517-3667-4603-98b6-49ce87cdfc1a service nova] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Refreshing instance network info cache due to event network-changed-ea845a8a-8eb0-4821-aef2-d5e99dd606ec. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1158.774780] env[68285]: DEBUG oslo_concurrency.lockutils [req-dd214b0d-46a1-4926-9936-52201d47b90a req-3d9fb517-3667-4603-98b6-49ce87cdfc1a service nova] Acquiring lock "refresh_cache-8fd23cb4-45da-4bd9-a258-845eb3f6a1dc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.804312] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525d11e6-c760-0dac-00e2-68c4708a6b7d, 'name': SearchDatastore_Task, 'duration_secs': 0.011441} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.804608] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.804818] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 589d1560-9269-4de2-bd79-454ebdaa40d4/589d1560-9269-4de2-bd79-454ebdaa40d4.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1158.805106] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbc8abf8-e1cb-425a-a715-acc6dd816dcd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.812606] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1158.812606] env[68285]: value = "task-2892120" [ 1158.812606] env[68285]: _type = "Task" [ 1158.812606] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.822303] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892120, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.963139] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Releasing lock "refresh_cache-8fd23cb4-45da-4bd9-a258-845eb3f6a1dc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.963419] env[68285]: DEBUG nova.compute.manager [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Instance network_info: |[{"id": "ea845a8a-8eb0-4821-aef2-d5e99dd606ec", "address": "fa:16:3e:53:ac:f5", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea845a8a-8e", "ovs_interfaceid": "ea845a8a-8eb0-4821-aef2-d5e99dd606ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1158.963742] env[68285]: DEBUG oslo_concurrency.lockutils [req-dd214b0d-46a1-4926-9936-52201d47b90a req-3d9fb517-3667-4603-98b6-49ce87cdfc1a service nova] Acquired lock "refresh_cache-8fd23cb4-45da-4bd9-a258-845eb3f6a1dc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1158.963919] env[68285]: DEBUG nova.network.neutron [req-dd214b0d-46a1-4926-9936-52201d47b90a req-3d9fb517-3667-4603-98b6-49ce87cdfc1a service nova] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Refreshing network info cache for port ea845a8a-8eb0-4821-aef2-d5e99dd606ec {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1158.965249] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:ac:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea845a8a-8eb0-4821-aef2-d5e99dd606ec', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1158.973668] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 
tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1158.974688] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1158.974916] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee1973e6-2a9b-4f8b-9ead-0c29146afae8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.997413] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1158.997413] env[68285]: value = "task-2892121" [ 1158.997413] env[68285]: _type = "Task" [ 1158.997413] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.007426] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892121, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.055462] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.122048] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.918s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.124177] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.717s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.126133] env[68285]: INFO nova.compute.claims [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1159.141160] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Acquiring lock "94652533-8c34-42fa-8d70-4effc307ec71" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.141683] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Lock "94652533-8c34-42fa-8d70-4effc307ec71" 
acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.142113] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Acquiring lock "94652533-8c34-42fa-8d70-4effc307ec71-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.142323] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Lock "94652533-8c34-42fa-8d70-4effc307ec71-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.142533] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Lock "94652533-8c34-42fa-8d70-4effc307ec71-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.145301] env[68285]: INFO nova.scheduler.client.report [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleted allocations for instance 15fd3159-0fff-461d-96ce-f8cfc04eff32 [ 1159.146577] env[68285]: INFO nova.compute.manager [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Terminating instance [ 1159.223212] env[68285]: DEBUG nova.compute.manager [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1159.250756] env[68285]: DEBUG nova.virt.hardware [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1159.251149] env[68285]: DEBUG nova.virt.hardware [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1159.251208] env[68285]: DEBUG nova.virt.hardware [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1159.251395] env[68285]: DEBUG nova.virt.hardware [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1159.251626] env[68285]: DEBUG nova.virt.hardware [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1159.251788] env[68285]: DEBUG nova.virt.hardware [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1159.252013] env[68285]: DEBUG nova.virt.hardware [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1159.252229] env[68285]: DEBUG nova.virt.hardware [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1159.252351] env[68285]: DEBUG 
nova.virt.hardware [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1159.252516] env[68285]: DEBUG nova.virt.hardware [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1159.252689] env[68285]: DEBUG nova.virt.hardware [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1159.253788] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f48a7f0-f493-48ed-9cce-d9cb24a80dce {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.263400] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775c2d14-8fbc-4964-a0dd-502f2db2e2ed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.325465] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892120, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497471} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.325729] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 589d1560-9269-4de2-bd79-454ebdaa40d4/589d1560-9269-4de2-bd79-454ebdaa40d4.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1159.325890] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1159.326206] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a07132ae-ddcd-4e1c-9741-b8655302bf7b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.333399] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1159.333399] env[68285]: value = "task-2892122" [ 1159.333399] env[68285]: _type = "Task" [ 1159.333399] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.344848] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892122, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.509073] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892121, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.654607] env[68285]: DEBUG nova.compute.manager [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1159.654828] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1159.655319] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c9b853de-3cd5-4108-9d2e-07c72e53623d tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "15fd3159-0fff-461d-96ce-f8cfc04eff32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.521s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.656179] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67664d13-bd6c-4585-8c54-2e508b8a4cae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.664906] env[68285]: DEBUG oslo_vmware.api [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Waiting for the task: (returnval){ [ 1159.664906] env[68285]: value = "task-2892123" [ 1159.664906] env[68285]: _type = "Task" [ 1159.664906] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.675756] env[68285]: DEBUG oslo_vmware.api [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892123, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.682595] env[68285]: DEBUG nova.network.neutron [req-dd214b0d-46a1-4926-9936-52201d47b90a req-3d9fb517-3667-4603-98b6-49ce87cdfc1a service nova] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Updated VIF entry in instance network info cache for port ea845a8a-8eb0-4821-aef2-d5e99dd606ec. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1159.683053] env[68285]: DEBUG nova.network.neutron [req-dd214b0d-46a1-4926-9936-52201d47b90a req-3d9fb517-3667-4603-98b6-49ce87cdfc1a service nova] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Updating instance_info_cache with network_info: [{"id": "ea845a8a-8eb0-4821-aef2-d5e99dd606ec", "address": "fa:16:3e:53:ac:f5", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea845a8a-8e", "ovs_interfaceid": "ea845a8a-8eb0-4821-aef2-d5e99dd606ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.844553] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892122, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.218281} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.844831] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1159.845643] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae30e9f-e284-42ae-b04c-dbd7fa32f6f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.869154] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 589d1560-9269-4de2-bd79-454ebdaa40d4/589d1560-9269-4de2-bd79-454ebdaa40d4.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1159.869485] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ef416a8-599f-41c1-88f1-5d67753cca57 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.895344] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1159.895344] env[68285]: value = "task-2892124" [ 1159.895344] env[68285]: _type = "Task" [ 1159.895344] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.904023] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892124, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.010098] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892121, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.020852] env[68285]: DEBUG nova.compute.manager [req-97b0e990-8145-4dc9-8138-62ca4602ae5c req-4fe25b43-3f50-408b-8bc7-0ca39bce90b4 service nova] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Received event network-vif-plugged-10900535-c864-4616-a243-0798b3cdb70a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1160.021344] env[68285]: DEBUG oslo_concurrency.lockutils [req-97b0e990-8145-4dc9-8138-62ca4602ae5c req-4fe25b43-3f50-408b-8bc7-0ca39bce90b4 service nova] Acquiring lock "5abddda1-9bf7-4039-81c7-8622f43cc72e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.021344] env[68285]: DEBUG oslo_concurrency.lockutils [req-97b0e990-8145-4dc9-8138-62ca4602ae5c req-4fe25b43-3f50-408b-8bc7-0ca39bce90b4 service nova] Lock "5abddda1-9bf7-4039-81c7-8622f43cc72e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.021451] env[68285]: DEBUG oslo_concurrency.lockutils [req-97b0e990-8145-4dc9-8138-62ca4602ae5c req-4fe25b43-3f50-408b-8bc7-0ca39bce90b4 service nova] Lock "5abddda1-9bf7-4039-81c7-8622f43cc72e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.021614] env[68285]: DEBUG nova.compute.manager [req-97b0e990-8145-4dc9-8138-62ca4602ae5c req-4fe25b43-3f50-408b-8bc7-0ca39bce90b4 service nova] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] No waiting events found dispatching network-vif-plugged-10900535-c864-4616-a243-0798b3cdb70a {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1160.021772] env[68285]: WARNING nova.compute.manager [req-97b0e990-8145-4dc9-8138-62ca4602ae5c req-4fe25b43-3f50-408b-8bc7-0ca39bce90b4 service nova] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Received unexpected event network-vif-plugged-10900535-c864-4616-a243-0798b3cdb70a for instance with vm_state building and task_state spawning. [ 1160.062657] env[68285]: DEBUG nova.network.neutron [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Successfully updated port: 10900535-c864-4616-a243-0798b3cdb70a {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1160.179833] env[68285]: DEBUG oslo_vmware.api [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892123, 'name': PowerOffVM_Task, 'duration_secs': 0.232504} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.180261] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1160.180423] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Volume detach. Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1160.180617] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580976', 'volume_id': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'name': 'volume-df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '94652533-8c34-42fa-8d70-4effc307ec71', 'attached_at': '', 'detached_at': '', 'volume_id': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'serial': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1160.184056] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9100ddd3-30f3-4660-b9ad-7c7122c8e793 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.187399] env[68285]: DEBUG oslo_concurrency.lockutils [req-dd214b0d-46a1-4926-9936-52201d47b90a req-3d9fb517-3667-4603-98b6-49ce87cdfc1a service nova] Releasing lock "refresh_cache-8fd23cb4-45da-4bd9-a258-845eb3f6a1dc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.209615] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d6dc34-2402-4638-b5a8-ec31250fb8c0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.217665] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcfff18-c6df-4da9-89dc-c98322dadc27 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.238463] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55bbdd46-f4cd-4bb9-bdb6-7afc8d846510 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.254422] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] The volume has not been displaced from its original location: [datastore1] volume-df3857a1-3fc9-43c0-a99d-e1a7509342bb/volume-df3857a1-3fc9-43c0-a99d-e1a7509342bb.vmdk. 
No consolidation needed. {{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1160.259686] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Reconfiguring VM instance instance-00000050 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1160.262606] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9dc7f112-fbec-4111-a2fc-8fcda48160e9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.282414] env[68285]: DEBUG oslo_vmware.api [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Waiting for the task: (returnval){ [ 1160.282414] env[68285]: value = "task-2892125" [ 1160.282414] env[68285]: _type = "Task" [ 1160.282414] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.295449] env[68285]: DEBUG oslo_vmware.api [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892125, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.410496] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892124, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.504881] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032f691e-c9ea-40dd-92a4-54424eac51f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.515217] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d64240e-1feb-4f4b-b32b-2a4bae045bd4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.518538] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892121, 'name': CreateVM_Task, 'duration_secs': 1.082797} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.518712] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1160.519710] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.519873] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.520212] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1160.520484] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e0b4b84-8253-4862-ad11-a437dac4f46b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.548437] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ae158f-9844-4851-9f0f-7b3ac233d81e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.552470] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1160.552470] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]526cf414-d24e-0e64-02c9-1d40d897c57b" [ 1160.552470] env[68285]: _type = "Task" [ 1160.552470] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.559377] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed148b2-28f7-4d31-9a7b-e436e3720b74 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.567147] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.567294] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.567448] env[68285]: DEBUG nova.network.neutron [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1160.568597] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]526cf414-d24e-0e64-02c9-1d40d897c57b, 'name': SearchDatastore_Task, 'duration_secs': 0.010395} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.569384] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.569608] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1160.569828] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.569972] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.570165] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1160.570456] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba930a29-89d6-40dd-8ba7-b3e7a0402c57 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.580965] env[68285]: DEBUG nova.compute.provider_tree [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1160.592370] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1160.592370] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1160.592370] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d77af1c2-e5f8-4e40-9770-d4624c1328c1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.597527] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1160.597527] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]522fd461-a2cb-a79f-1c10-a8f35654867f" [ 1160.597527] env[68285]: _type = "Task" [ 1160.597527] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.606543] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522fd461-a2cb-a79f-1c10-a8f35654867f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.792798] env[68285]: DEBUG oslo_vmware.api [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892125, 'name': ReconfigVM_Task, 'duration_secs': 0.175693} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.793132] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Reconfigured VM instance instance-00000050 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1160.798180] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ea8d741-faa7-4037-bc51-81a36e119b2f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.817037] env[68285]: DEBUG oslo_vmware.api [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Waiting for the task: (returnval){ [ 1160.817037] env[68285]: value = "task-2892126" [ 1160.817037] env[68285]: _type = "Task" [ 1160.817037] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.828295] env[68285]: DEBUG oslo_vmware.api [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892126, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.906549] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892124, 'name': ReconfigVM_Task, 'duration_secs': 0.864061} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.906836] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 589d1560-9269-4de2-bd79-454ebdaa40d4/589d1560-9269-4de2-bd79-454ebdaa40d4.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1160.907682] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14c6ce89-b573-4723-91ba-79cac126a0af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.914580] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1160.914580] env[68285]: value = "task-2892127" [ 1160.914580] env[68285]: _type = "Task" [ 1160.914580] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.923666] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892127, 'name': Rename_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.948835] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "2eec5d74-b1b8-4714-aaf1-687ec56ad860" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.949077] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "2eec5d74-b1b8-4714-aaf1-687ec56ad860" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.975468] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "be47df2a-aee7-4275-9acb-9cf74367f503" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.975736] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "be47df2a-aee7-4275-9acb-9cf74367f503" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.975918] env[68285]: INFO nova.compute.manager [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Shelving [ 1161.084620] env[68285]: DEBUG nova.scheduler.client.report [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1161.108895] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522fd461-a2cb-a79f-1c10-a8f35654867f, 'name': SearchDatastore_Task, 'duration_secs': 0.009867} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.109703] env[68285]: DEBUG nova.network.neutron [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1161.112459] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25f14296-2707-414e-b28c-616e5e590b41 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.118304] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1161.118304] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e03d8d-93be-ee47-23df-77c38d0f932e" [ 1161.118304] env[68285]: _type = "Task" [ 1161.118304] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.127965] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e03d8d-93be-ee47-23df-77c38d0f932e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.257322] env[68285]: DEBUG nova.network.neutron [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance_info_cache with network_info: [{"id": "10900535-c864-4616-a243-0798b3cdb70a", "address": "fa:16:3e:49:12:6b", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10900535-c8", "ovs_interfaceid": "10900535-c864-4616-a243-0798b3cdb70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.327414] env[68285]: DEBUG oslo_vmware.api [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892126, 'name': 
ReconfigVM_Task, 'duration_secs': 0.177988} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.327736] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-580976', 'volume_id': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'name': 'volume-df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '94652533-8c34-42fa-8d70-4effc307ec71', 'attached_at': '', 'detached_at': '', 'volume_id': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb', 'serial': 'df3857a1-3fc9-43c0-a99d-e1a7509342bb'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1161.328075] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1161.328972] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8289c5-1819-491d-8b89-7113159c360f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.336175] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1161.336446] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5c65e60-69cd-454e-983d-70bfc5cc2001 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.400248] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1161.400687] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1161.400910] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Deleting the datastore file [datastore1] 94652533-8c34-42fa-8d70-4effc307ec71 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1161.401621] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-eecb5bab-ee42-4a35-acbc-9654592eca81 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.408591] env[68285]: DEBUG oslo_vmware.api [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Waiting for the task: (returnval){ [ 1161.408591] env[68285]: value = "task-2892129" [ 1161.408591] env[68285]: _type = "Task" [ 1161.408591] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.416749] env[68285]: DEBUG oslo_vmware.api [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892129, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.424648] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892127, 'name': Rename_Task, 'duration_secs': 0.417276} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.424917] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1161.425207] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6552ee51-43b0-44ad-8bfd-10ba0a54bbee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.431060] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1161.431060] env[68285]: value = "task-2892130" [ 1161.431060] env[68285]: _type = "Task" [ 1161.431060] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.438718] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892130, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.451340] env[68285]: DEBUG nova.compute.manager [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1161.589288] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.590477] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.380s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.592027] env[68285]: INFO nova.compute.claims [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1161.632034] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e03d8d-93be-ee47-23df-77c38d0f932e, 'name': SearchDatastore_Task, 'duration_secs': 0.016089} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.632787] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.633061] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc/8fd23cb4-45da-4bd9-a258-845eb3f6a1dc.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1161.633347] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b13235b-d709-4050-800c-6478748c22d9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.641467] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1161.641467] env[68285]: value = "task-2892131" [ 1161.641467] env[68285]: _type = "Task" [ 1161.641467] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.651121] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892131, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.760272] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.760683] env[68285]: DEBUG nova.compute.manager [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Instance network_info: |[{"id": "10900535-c864-4616-a243-0798b3cdb70a", "address": "fa:16:3e:49:12:6b", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10900535-c8", "ovs_interfaceid": "10900535-c864-4616-a243-0798b3cdb70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1161.761183] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:12:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10900535-c864-4616-a243-0798b3cdb70a', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1161.769242] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1161.770148] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1161.770452] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23e0bf66-bd03-42d0-bec5-454ddbd30d02 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.793036] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1161.793036] env[68285]: value = "task-2892132" [ 1161.793036] env[68285]: _type = "Task" [ 1161.793036] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.802115] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892132, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.919663] env[68285]: DEBUG oslo_vmware.api [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Task: {'id': task-2892129, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08045} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.920029] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1161.920465] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1161.920465] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1161.920797] env[68285]: INFO nova.compute.manager [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Took 2.27 seconds to destroy the instance on the hypervisor. [ 1161.921068] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1161.921313] env[68285]: DEBUG nova.compute.manager [-] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1161.921453] env[68285]: DEBUG nova.network.neutron [-] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1161.944589] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892130, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.975066] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.987555] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1161.987915] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7eea136d-0677-4082-99d0-6233abee9ebd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.997385] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1161.997385] env[68285]: value = "task-2892133" [ 1161.997385] env[68285]: _type = "Task" [ 1161.997385] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.009051] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892133, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.057860] env[68285]: DEBUG nova.compute.manager [req-537d4e6e-e552-45ba-b0ce-a83b93974af5 req-fe2100df-9691-4cd2-8129-ffda8f20afab service nova] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Received event network-changed-10900535-c864-4616-a243-0798b3cdb70a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1162.058091] env[68285]: DEBUG nova.compute.manager [req-537d4e6e-e552-45ba-b0ce-a83b93974af5 req-fe2100df-9691-4cd2-8129-ffda8f20afab service nova] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Refreshing instance network info cache due to event network-changed-10900535-c864-4616-a243-0798b3cdb70a. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1162.058343] env[68285]: DEBUG oslo_concurrency.lockutils [req-537d4e6e-e552-45ba-b0ce-a83b93974af5 req-fe2100df-9691-4cd2-8129-ffda8f20afab service nova] Acquiring lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.058557] env[68285]: DEBUG oslo_concurrency.lockutils [req-537d4e6e-e552-45ba-b0ce-a83b93974af5 req-fe2100df-9691-4cd2-8129-ffda8f20afab service nova] Acquired lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.058682] env[68285]: DEBUG nova.network.neutron [req-537d4e6e-e552-45ba-b0ce-a83b93974af5 req-fe2100df-9691-4cd2-8129-ffda8f20afab service nova] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Refreshing network info cache for port 10900535-c864-4616-a243-0798b3cdb70a {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1162.098467] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Acquiring lock "ee1b7fdb-fb63-449c-a3c2-cec8c0bfb817" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.098467] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Lock "ee1b7fdb-fb63-449c-a3c2-cec8c0bfb817" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.155510] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892131, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.307463] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892132, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.442279] env[68285]: DEBUG oslo_vmware.api [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892130, 'name': PowerOnVM_Task, 'duration_secs': 0.76237} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.442552] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1162.442763] env[68285]: INFO nova.compute.manager [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Took 8.07 seconds to spawn the instance on the hypervisor. [ 1162.442944] env[68285]: DEBUG nova.compute.manager [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1162.443771] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdbf147a-8836-4001-a253-19de6e3262a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.508067] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892133, 'name': PowerOffVM_Task, 'duration_secs': 0.332258} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.508343] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1162.509160] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54c3957-b6f7-4136-9c74-414a3f5536ec {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.530777] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487535d2-c833-42b5-a0f3-d097e39766e2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.608033] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Lock "ee1b7fdb-fb63-449c-a3c2-cec8c0bfb817" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.511s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.608033] env[68285]: DEBUG nova.compute.manager [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1162.661735] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892131, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532083} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.664286] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc/8fd23cb4-45da-4bd9-a258-845eb3f6a1dc.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1162.664519] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1162.664990] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39d2ebc9-f2a2-4f0d-b921-8cddb629f819 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.672753] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1162.672753] env[68285]: value = "task-2892134" [ 1162.672753] env[68285]: _type = "Task" [ 1162.672753] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.681982] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892134, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.806992] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892132, 'name': CreateVM_Task, 'duration_secs': 0.597157} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.807266] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1162.809209] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.809513] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.809734] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1162.812957] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08094e08-3b2a-46a2-b7fc-d5e14cbc2f1e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.820723] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1162.820723] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52913f40-5434-c36a-e1e6-600ee7c4e53a" [ 1162.820723] env[68285]: _type = "Task" [ 1162.820723] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.835084] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52913f40-5434-c36a-e1e6-600ee7c4e53a, 'name': SearchDatastore_Task, 'duration_secs': 0.010418} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.835362] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.835599] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1162.835836] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.835980] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.836171] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1162.838755] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52eaf448-fda8-406e-992c-e847931f6b0f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.848252] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1162.848446] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1162.849178] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-450b19c0-d3c5-495b-b3a3-7c8c7e148df3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.858028] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1162.858028] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52930975-4f69-5a94-df9d-93c20584e9bf" [ 1162.858028] env[68285]: _type = "Task" [ 1162.858028] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.865941] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52930975-4f69-5a94-df9d-93c20584e9bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.883953] env[68285]: DEBUG nova.network.neutron [req-537d4e6e-e552-45ba-b0ce-a83b93974af5 req-fe2100df-9691-4cd2-8129-ffda8f20afab service nova] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updated VIF entry in instance network info cache for port 10900535-c864-4616-a243-0798b3cdb70a. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1162.884313] env[68285]: DEBUG nova.network.neutron [req-537d4e6e-e552-45ba-b0ce-a83b93974af5 req-fe2100df-9691-4cd2-8129-ffda8f20afab service nova] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance_info_cache with network_info: [{"id": "10900535-c864-4616-a243-0798b3cdb70a", "address": "fa:16:3e:49:12:6b", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10900535-c8", "ovs_interfaceid": "10900535-c864-4616-a243-0798b3cdb70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.920258] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ca4441-258c-4cc6-b14a-73c967a4bcfb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.927473] env[68285]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfde546c-bd08-44cd-aa75-71fa5f6585c0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.970569] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0040a8b7-79ca-4e4f-98fc-4ff3b7d81a18 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.972430] env[68285]: INFO nova.compute.manager [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Took 27.86 seconds to build instance. [ 1162.977616] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2418f4f4-38e1-4094-aa16-f7c18f016bfa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.992079] env[68285]: DEBUG nova.network.neutron [-] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.993713] env[68285]: DEBUG nova.compute.provider_tree [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1163.042297] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1163.042615] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-78c65301-e484-49a3-a69e-cbd34914e4ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.051232] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1163.051232] env[68285]: value = "task-2892135" [ 1163.051232] env[68285]: _type = "Task" [ 1163.051232] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.060229] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892135, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.115019] env[68285]: DEBUG nova.compute.utils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1163.115019] env[68285]: DEBUG nova.compute.manager [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1163.115019] env[68285]: DEBUG nova.network.neutron [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1163.173544] env[68285]: DEBUG nova.policy [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92c023de13ff465c8ff94528e4336cf0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e7fde26ee64641bbb6142d670295de12', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1163.184861] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892134, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.368796] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52930975-4f69-5a94-df9d-93c20584e9bf, 'name': SearchDatastore_Task, 'duration_secs': 0.010407} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.369722] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bea30ea-9c67-433c-82ac-aef6b04000b3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.375966] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1163.375966] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c0a9c9-7013-ce69-237a-eeaf0f298092" [ 1163.375966] env[68285]: _type = "Task" [ 1163.375966] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.384594] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c0a9c9-7013-ce69-237a-eeaf0f298092, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.387176] env[68285]: DEBUG oslo_concurrency.lockutils [req-537d4e6e-e552-45ba-b0ce-a83b93974af5 req-fe2100df-9691-4cd2-8129-ffda8f20afab service nova] Releasing lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.464346] env[68285]: DEBUG nova.network.neutron [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Successfully created port: 3dffbc03-2bda-47c0-b305-c3f2e1e519bf {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1163.475105] env[68285]: DEBUG oslo_concurrency.lockutils [None req-19605c9a-a2bf-4207-9956-76839016941b tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "589d1560-9269-4de2-bd79-454ebdaa40d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.377s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.495844] env[68285]: INFO nova.compute.manager [-] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Took 1.57 seconds to deallocate network for instance. [ 1163.498916] env[68285]: DEBUG nova.scheduler.client.report [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1163.562734] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892135, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.618422] env[68285]: DEBUG nova.compute.manager [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1163.688547] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892134, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.887336] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c0a9c9-7013-ce69-237a-eeaf0f298092, 'name': SearchDatastore_Task, 'duration_secs': 0.06994} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.887655] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.887977] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 5abddda1-9bf7-4039-81c7-8622f43cc72e/5abddda1-9bf7-4039-81c7-8622f43cc72e.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1163.888326] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28005ac6-6bda-4384-8329-5a4e061a06af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.896197] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1163.896197] env[68285]: value = "task-2892136" [ 1163.896197] env[68285]: _type = "Task" [ 1163.896197] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.907399] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892136, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.005874] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.006468] env[68285]: DEBUG nova.compute.manager [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1164.009084] env[68285]: DEBUG oslo_concurrency.lockutils [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.009287] env[68285]: DEBUG oslo_concurrency.lockutils [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.011480] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.491s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.011601] env[68285]: DEBUG nova.objects.instance [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lazy-loading 'resources' on Instance uuid 3094ed52-33c2-40ff-ac77-6bb975a2f681 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1164.045919] env[68285]: INFO nova.scheduler.client.report [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleted allocations for instance 1f5fe064-0443-4b7f-911a-45d803836eeb [ 1164.057973] env[68285]: INFO nova.compute.manager [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Took 0.56 seconds to detach 1 volumes for instance. 
[ 1164.060309] env[68285]: DEBUG nova.compute.manager [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Deleting volume: df3857a1-3fc9-43c0-a99d-e1a7509342bb {{(pid=68285) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1164.069096] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892135, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.101172] env[68285]: DEBUG nova.compute.manager [req-0cd3fde2-c93f-4b6a-90e3-a44ce22fe696 req-7ed5e3fb-c04d-4a57-9e15-99018e7ce1e1 service nova] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Received event network-vif-deleted-6c7e74ef-9f39-486f-8e6c-0e8339dac843 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1164.189930] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892134, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.056982} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.190464] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1164.191823] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646d24ad-a75d-4b28-b810-428cd92b9814 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.232488] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc/8fd23cb4-45da-4bd9-a258-845eb3f6a1dc.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1164.234203] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e89edac-37eb-4ac3-8a6f-459d863adfb6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.270514] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1164.270514] env[68285]: value = "task-2892138" [ 1164.270514] env[68285]: _type = "Task" [ 1164.270514] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.280312] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892138, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.407024] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892136, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472933} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.407349] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 5abddda1-9bf7-4039-81c7-8622f43cc72e/5abddda1-9bf7-4039-81c7-8622f43cc72e.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1164.407506] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1164.407768] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d17a619-12f9-496a-8659-a703fadf72a6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.415662] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1164.415662] env[68285]: value = "task-2892139" [ 1164.415662] env[68285]: _type = "Task" [ 1164.415662] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.432363] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892139, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.514743] env[68285]: DEBUG nova.compute.utils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1164.519331] env[68285]: DEBUG nova.compute.manager [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1164.519515] env[68285]: DEBUG nova.network.neutron [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1164.554887] env[68285]: DEBUG oslo_concurrency.lockutils [None req-614c97cd-0137-4b9b-a295-bd4431c93eb7 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "1f5fe064-0443-4b7f-911a-45d803836eeb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.124s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.573927] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892135, 'name': CreateSnapshot_Task, 'duration_secs': 1.059193} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.575525] env[68285]: DEBUG nova.policy [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64faebf5ce1549fe938f12248656d8d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2727048b316143c7bfa2aef4f9b264f2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1164.579322] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1164.581103] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd35b47-b822-4205-b275-4e27671ec675 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.623108] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.632739] env[68285]: DEBUG nova.compute.manager [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1164.658230] env[68285]: DEBUG nova.virt.hardware [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1164.658564] env[68285]: DEBUG nova.virt.hardware [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1164.658809] env[68285]: DEBUG nova.virt.hardware [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1164.659103] env[68285]: DEBUG nova.virt.hardware [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1164.659263] env[68285]: DEBUG nova.virt.hardware [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1164.659463] env[68285]: DEBUG nova.virt.hardware [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1164.659720] env[68285]: DEBUG nova.virt.hardware [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1164.659926] env[68285]: DEBUG nova.virt.hardware [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1164.660150] env[68285]: DEBUG nova.virt.hardware [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca 
tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1164.660365] env[68285]: DEBUG nova.virt.hardware [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1164.660601] env[68285]: DEBUG nova.virt.hardware [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1164.661519] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a46a3b7-6d23-43c1-83d5-8a05ee10450b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.672937] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78187edf-1afb-42fa-9bc1-a925c2105a7d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.784213] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892138, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.834721] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d94ab5-5e6a-44b5-bd44-243791a5b7ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.843840] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc67b350-3540-4342-8572-d86432ced3e5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.877211] env[68285]: DEBUG nova.network.neutron [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Successfully created port: 9199e860-a70a-4057-93f0-526a4c8a2ed7 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1164.880161] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0e70f6-a8ed-4d75-978d-797eb9a016c4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.888950] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6994be13-dbf3-4461-b249-46293115217c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.904967] env[68285]: DEBUG nova.compute.provider_tree [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed in 
ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1164.929354] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892139, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071486} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.929354] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1164.929660] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ece859-14a7-4a20-bf7c-428f3192232b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.955738] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 5abddda1-9bf7-4039-81c7-8622f43cc72e/5abddda1-9bf7-4039-81c7-8622f43cc72e.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1164.956555] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b5e342e-5694-452b-87d8-a8cfade642c7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.981694] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1164.981694] env[68285]: value = "task-2892140" [ 1164.981694] env[68285]: _type = "Task" [ 1164.981694] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.992308] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892140, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.022404] env[68285]: DEBUG nova.compute.manager [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1165.100763] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1165.101552] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-761c6092-9e52-486d-9051-9e21063ba7ce {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.112444] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1165.112444] env[68285]: value = "task-2892141" [ 1165.112444] env[68285]: _type = "Task" [ 1165.112444] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.121679] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892141, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.282278] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892138, 'name': ReconfigVM_Task, 'duration_secs': 0.571102} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.282619] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc/8fd23cb4-45da-4bd9-a258-845eb3f6a1dc.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1165.283426] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7bc587b4-fbb6-4692-aa12-0e2f5e948dba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.293516] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1165.293516] env[68285]: value = "task-2892142" [ 1165.293516] env[68285]: _type = "Task" [ 1165.293516] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.303546] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892142, 'name': Rename_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.409042] env[68285]: DEBUG nova.scheduler.client.report [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1165.448833] env[68285]: DEBUG nova.network.neutron [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Successfully updated port: 3dffbc03-2bda-47c0-b305-c3f2e1e519bf {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1165.492115] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892140, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.624632] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892141, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.804135] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892142, 'name': Rename_Task, 'duration_secs': 0.378039} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.804478] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1165.804864] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac5164b9-5bfd-4268-b8c2-ff4ad2c2b4f9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.814973] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1165.814973] env[68285]: value = "task-2892143" [ 1165.814973] env[68285]: _type = "Task" [ 1165.814973] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.837100] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892143, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.915251] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.904s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.918255] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.584s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.919993] env[68285]: INFO nova.compute.claims [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1165.940032] env[68285]: INFO nova.scheduler.client.report [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleted allocations for instance 3094ed52-33c2-40ff-ac77-6bb975a2f681 [ 1165.951126] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Acquiring lock "refresh_cache-75b9c202-b50d-4c59-b3ef-03e61225a1dc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.951290] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Acquired lock "refresh_cache-75b9c202-b50d-4c59-b3ef-03e61225a1dc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.951478] env[68285]: DEBUG nova.network.neutron [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1165.992179] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892140, 'name': ReconfigVM_Task, 'duration_secs': 0.941758} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.993132] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 5abddda1-9bf7-4039-81c7-8622f43cc72e/5abddda1-9bf7-4039-81c7-8622f43cc72e.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1165.993770] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b3c32ae-ed20-4a4e-b238-f062fbcb50b3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.001471] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1166.001471] env[68285]: value = "task-2892144" [ 1166.001471] env[68285]: _type = "Task" [ 1166.001471] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.011729] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892144, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.032876] env[68285]: DEBUG nova.compute.manager [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1166.060376] env[68285]: DEBUG nova.virt.hardware [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1166.060376] env[68285]: DEBUG nova.virt.hardware [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1166.060659] env[68285]: DEBUG nova.virt.hardware [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1166.060746] env[68285]: DEBUG nova.virt.hardware [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1166.060937] env[68285]: DEBUG nova.virt.hardware [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1166.062166] env[68285]: DEBUG nova.virt.hardware [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1166.062166] env[68285]: DEBUG nova.virt.hardware [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1166.062166] env[68285]: DEBUG nova.virt.hardware [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1166.062166] 
env[68285]: DEBUG nova.virt.hardware [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1166.062166] env[68285]: DEBUG nova.virt.hardware [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1166.062166] env[68285]: DEBUG nova.virt.hardware [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1166.063154] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ef2030-9673-4481-bd68-1ce42df887d8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.072753] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e546a9-e825-4933-9421-d008f23a49e5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.100408] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.100731] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.125294] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892141, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.131080] env[68285]: DEBUG nova.compute.manager [req-3e26ecdd-2678-45f2-9967-fa82f6666eea req-22135113-4a29-4a8b-b49c-f42876ac656e service nova] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Received event network-vif-plugged-3dffbc03-2bda-47c0-b305-c3f2e1e519bf {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1166.131398] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e26ecdd-2678-45f2-9967-fa82f6666eea req-22135113-4a29-4a8b-b49c-f42876ac656e service nova] Acquiring lock "75b9c202-b50d-4c59-b3ef-03e61225a1dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.131656] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e26ecdd-2678-45f2-9967-fa82f6666eea req-22135113-4a29-4a8b-b49c-f42876ac656e service nova] Lock "75b9c202-b50d-4c59-b3ef-03e61225a1dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.131849] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e26ecdd-2678-45f2-9967-fa82f6666eea req-22135113-4a29-4a8b-b49c-f42876ac656e service nova] Lock "75b9c202-b50d-4c59-b3ef-03e61225a1dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.132081] env[68285]: DEBUG nova.compute.manager [req-3e26ecdd-2678-45f2-9967-fa82f6666eea req-22135113-4a29-4a8b-b49c-f42876ac656e service nova] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] No waiting events found dispatching network-vif-plugged-3dffbc03-2bda-47c0-b305-c3f2e1e519bf {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1166.132222] env[68285]: WARNING nova.compute.manager [req-3e26ecdd-2678-45f2-9967-fa82f6666eea req-22135113-4a29-4a8b-b49c-f42876ac656e service nova] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Received unexpected event network-vif-plugged-3dffbc03-2bda-47c0-b305-c3f2e1e519bf for instance with vm_state building and task_state spawning. [ 1166.132415] env[68285]: DEBUG nova.compute.manager [req-3e26ecdd-2678-45f2-9967-fa82f6666eea req-22135113-4a29-4a8b-b49c-f42876ac656e service nova] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Received event network-changed-3dffbc03-2bda-47c0-b305-c3f2e1e519bf {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1166.132613] env[68285]: DEBUG nova.compute.manager [req-3e26ecdd-2678-45f2-9967-fa82f6666eea req-22135113-4a29-4a8b-b49c-f42876ac656e service nova] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Refreshing instance network info cache due to event network-changed-3dffbc03-2bda-47c0-b305-c3f2e1e519bf. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1166.132830] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e26ecdd-2678-45f2-9967-fa82f6666eea req-22135113-4a29-4a8b-b49c-f42876ac656e service nova] Acquiring lock "refresh_cache-75b9c202-b50d-4c59-b3ef-03e61225a1dc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.328034] env[68285]: DEBUG oslo_vmware.api [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892143, 'name': PowerOnVM_Task, 'duration_secs': 0.515058} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.328955] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1166.328955] env[68285]: INFO nova.compute.manager [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Took 9.51 seconds to spawn the instance on the hypervisor. [ 1166.328955] env[68285]: DEBUG nova.compute.manager [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1166.330378] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d23a658-6f5f-482c-b514-55fe9ae94dc0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.383730] env[68285]: DEBUG nova.network.neutron [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Successfully updated port: 9199e860-a70a-4057-93f0-526a4c8a2ed7 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1166.453043] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2a79445d-386c-4ae2-a574-77f02919aa8a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "3094ed52-33c2-40ff-ac77-6bb975a2f681" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.849s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.490967] env[68285]: DEBUG nova.network.neutron [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1166.515705] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892144, 'name': Rename_Task, 'duration_secs': 0.166598} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.518188] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1166.518663] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87cf18e3-3c37-4f70-a13c-514975cbf666 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.526913] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1166.526913] env[68285]: value = "task-2892145" [ 1166.526913] env[68285]: _type = "Task" [ 1166.526913] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.538666] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892145, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.604170] env[68285]: DEBUG nova.compute.utils [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1166.628921] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892141, 'name': CloneVM_Task, 'duration_secs': 1.334688} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.629735] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Created linked-clone VM from snapshot [ 1166.630743] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8f7ef3-8346-4d35-a500-5969c0136457 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.639541] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Uploading image ba8823bf-179d-43d4-8712-d66dd79f84da {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1166.645772] env[68285]: DEBUG nova.compute.manager [req-d25608ad-e28a-415b-b1ff-05d3e66d3d1e req-b9e781a2-59da-4f13-afbe-adb01aac643a service nova] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Received event network-vif-plugged-9199e860-a70a-4057-93f0-526a4c8a2ed7 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1166.645772] env[68285]: DEBUG oslo_concurrency.lockutils [req-d25608ad-e28a-415b-b1ff-05d3e66d3d1e req-b9e781a2-59da-4f13-afbe-adb01aac643a service nova] Acquiring lock "fe8e0a71-e9b0-4035-a696-51455d6fc473-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.645772] env[68285]: DEBUG oslo_concurrency.lockutils [req-d25608ad-e28a-415b-b1ff-05d3e66d3d1e req-b9e781a2-59da-4f13-afbe-adb01aac643a service nova] Lock "fe8e0a71-e9b0-4035-a696-51455d6fc473-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.645959] env[68285]: DEBUG oslo_concurrency.lockutils [req-d25608ad-e28a-415b-b1ff-05d3e66d3d1e req-b9e781a2-59da-4f13-afbe-adb01aac643a service nova] Lock "fe8e0a71-e9b0-4035-a696-51455d6fc473-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.646348] env[68285]: DEBUG nova.compute.manager [req-d25608ad-e28a-415b-b1ff-05d3e66d3d1e req-b9e781a2-59da-4f13-afbe-adb01aac643a service nova] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] No waiting events found dispatching network-vif-plugged-9199e860-a70a-4057-93f0-526a4c8a2ed7 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1166.646348] env[68285]: WARNING nova.compute.manager [req-d25608ad-e28a-415b-b1ff-05d3e66d3d1e req-b9e781a2-59da-4f13-afbe-adb01aac643a service nova] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Received unexpected event network-vif-plugged-9199e860-a70a-4057-93f0-526a4c8a2ed7 for instance with vm_state building and task_state spawning. 
[ 1166.663805] env[68285]: DEBUG nova.network.neutron [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Updating instance_info_cache with network_info: [{"id": "3dffbc03-2bda-47c0-b305-c3f2e1e519bf", "address": "fa:16:3e:46:f1:11", "network": {"id": "0899ce82-1387-4b30-9de4-98a8f4e80454", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1765964231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7fde26ee64641bbb6142d670295de12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dffbc03-2b", "ovs_interfaceid": "3dffbc03-2bda-47c0-b305-c3f2e1e519bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.672878] env[68285]: DEBUG oslo_vmware.rw_handles [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1166.672878] env[68285]: value = "vm-581019" [ 1166.672878] env[68285]: _type = "VirtualMachine" [ 1166.672878] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1166.673294] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-de2be575-9356-4c0e-ac33-731e145908b8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.683124] env[68285]: DEBUG oslo_vmware.rw_handles [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lease: (returnval){ [ 1166.683124] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52cf4c4a-4be6-aeda-11bb-6d27dd7c92db" [ 1166.683124] env[68285]: _type = "HttpNfcLease" [ 1166.683124] env[68285]: } obtained for exporting VM: (result){ [ 1166.683124] env[68285]: value = "vm-581019" [ 1166.683124] env[68285]: _type = "VirtualMachine" [ 1166.683124] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1166.683541] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the lease: (returnval){ [ 1166.683541] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52cf4c4a-4be6-aeda-11bb-6d27dd7c92db" [ 1166.683541] env[68285]: _type = "HttpNfcLease" [ 1166.683541] env[68285]: } to be ready. 
{{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1166.693017] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1166.693017] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52cf4c4a-4be6-aeda-11bb-6d27dd7c92db" [ 1166.693017] env[68285]: _type = "HttpNfcLease" [ 1166.693017] env[68285]: } is initializing. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1166.847473] env[68285]: INFO nova.compute.manager [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Took 31.34 seconds to build instance. [ 1166.887043] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.887280] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1166.887476] env[68285]: DEBUG nova.network.neutron [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1167.040246] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892145, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.047322] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "fe6c495f-6917-4e3d-acce-7487a45e3ef4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.047579] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "fe6c495f-6917-4e3d-acce-7487a45e3ef4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.107434] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.166818] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Releasing lock "refresh_cache-75b9c202-b50d-4c59-b3ef-03e61225a1dc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1167.167171] env[68285]: DEBUG nova.compute.manager [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Instance network_info: |[{"id": "3dffbc03-2bda-47c0-b305-c3f2e1e519bf", "address": "fa:16:3e:46:f1:11", "network": {"id": "0899ce82-1387-4b30-9de4-98a8f4e80454", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1765964231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7fde26ee64641bbb6142d670295de12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dffbc03-2b", "ovs_interfaceid": "3dffbc03-2bda-47c0-b305-c3f2e1e519bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1167.169611] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e26ecdd-2678-45f2-9967-fa82f6666eea req-22135113-4a29-4a8b-b49c-f42876ac656e 
service nova] Acquired lock "refresh_cache-75b9c202-b50d-4c59-b3ef-03e61225a1dc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1167.169828] env[68285]: DEBUG nova.network.neutron [req-3e26ecdd-2678-45f2-9967-fa82f6666eea req-22135113-4a29-4a8b-b49c-f42876ac656e service nova] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Refreshing network info cache for port 3dffbc03-2bda-47c0-b305-c3f2e1e519bf {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1167.171014] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:f1:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c5652322-9f10-4996-baed-4c0aa13a1b4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3dffbc03-2bda-47c0-b305-c3f2e1e519bf', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1167.178345] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Creating folder: Project (e7fde26ee64641bbb6142d670295de12). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1167.181718] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ca89c0a-86ec-40a4-bc70-e7625dbae0d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.190013] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94284a42-11de-416e-ac07-12cdf414e0eb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.194804] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1167.194804] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52cf4c4a-4be6-aeda-11bb-6d27dd7c92db" [ 1167.194804] env[68285]: _type = "HttpNfcLease" [ 1167.194804] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1167.196443] env[68285]: DEBUG oslo_vmware.rw_handles [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1167.196443] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52cf4c4a-4be6-aeda-11bb-6d27dd7c92db" [ 1167.196443] env[68285]: _type = "HttpNfcLease" [ 1167.196443] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1167.196790] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Created folder: Project (e7fde26ee64641bbb6142d670295de12) in parent group-v580775. 
[ 1167.197554] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Creating folder: Instances. Parent ref: group-v581020. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1167.197693] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62730c1f-7a63-4b81-bd4e-0992cc489a60 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.200734] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f89e41c-ab19-479c-9a32-14a9f2cd1be0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.207020] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce81ff01-fc95-485c-959b-c6ba26a4aeb3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.213411] env[68285]: DEBUG oslo_vmware.rw_handles [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521374a3-6471-1744-8ddc-cb7e78178f8d/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1167.213611] env[68285]: DEBUG oslo_vmware.rw_handles [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521374a3-6471-1744-8ddc-cb7e78178f8d/disk-0.vmdk for reading. {{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1167.218314] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Created folder: Instances in parent group-v581020. [ 1167.218615] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1167.219925] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1167.299282] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7e246f2-b308-4246-9e0b-be3a69e95506 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.322998] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06fea590-456a-4317-a4ab-b1e5d06b8902 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.334102] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a47480b-5382-4372-93db-87ebecd7247a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.339959] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1167.339959] env[68285]: value = "task-2892149" [ 1167.339959] env[68285]: _type = "Task" [ 1167.339959] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.357098] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79f7256f-6743-4f93-b1f5-00e99fccac32 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "8fd23cb4-45da-4bd9-a258-845eb3f6a1dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.860s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.358172] env[68285]: DEBUG nova.compute.provider_tree [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.368767] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-607302d8-3e14-4b25-bf1a-e3a5e4f300b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.370668] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892149, 'name': CreateVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.440393] env[68285]: DEBUG nova.network.neutron [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1167.516724] env[68285]: DEBUG nova.network.neutron [req-3e26ecdd-2678-45f2-9967-fa82f6666eea req-22135113-4a29-4a8b-b49c-f42876ac656e service nova] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Updated VIF entry in instance network info cache for port 3dffbc03-2bda-47c0-b305-c3f2e1e519bf. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1167.517446] env[68285]: DEBUG nova.network.neutron [req-3e26ecdd-2678-45f2-9967-fa82f6666eea req-22135113-4a29-4a8b-b49c-f42876ac656e service nova] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Updating instance_info_cache with network_info: [{"id": "3dffbc03-2bda-47c0-b305-c3f2e1e519bf", "address": "fa:16:3e:46:f1:11", "network": {"id": "0899ce82-1387-4b30-9de4-98a8f4e80454", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1765964231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e7fde26ee64641bbb6142d670295de12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dffbc03-2b", "ovs_interfaceid": "3dffbc03-2bda-47c0-b305-c3f2e1e519bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.542411] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892145, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.549640] env[68285]: DEBUG nova.compute.manager [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1167.584930] env[68285]: DEBUG nova.network.neutron [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating instance_info_cache with network_info: [{"id": "9199e860-a70a-4057-93f0-526a4c8a2ed7", "address": "fa:16:3e:6f:50:a1", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9199e860-a7", "ovs_interfaceid": "9199e860-a70a-4057-93f0-526a4c8a2ed7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.851822] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892149, 'name': CreateVM_Task, 'duration_secs': 0.41868} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.852102] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1167.852996] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.853231] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1167.853791] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1167.854101] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e3425d1-66f1-485a-94be-46948f068b70 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.859580] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Waiting for the task: (returnval){ [ 1167.859580] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529901d4-03a3-0d69-684b-54391f216750" [ 1167.859580] env[68285]: _type = "Task" [ 1167.859580] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.863681] env[68285]: DEBUG nova.scheduler.client.report [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1167.873323] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529901d4-03a3-0d69-684b-54391f216750, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.021600] env[68285]: DEBUG oslo_concurrency.lockutils [req-3e26ecdd-2678-45f2-9967-fa82f6666eea req-22135113-4a29-4a8b-b49c-f42876ac656e service nova] Releasing lock "refresh_cache-75b9c202-b50d-4c59-b3ef-03e61225a1dc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1168.040131] env[68285]: DEBUG oslo_vmware.api [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892145, 'name': PowerOnVM_Task, 'duration_secs': 1.018164} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.040715] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1168.041374] env[68285]: INFO nova.compute.manager [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Took 8.82 seconds to spawn the instance on the hypervisor. [ 1168.041374] env[68285]: DEBUG nova.compute.manager [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1168.042241] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe05d00-7a92-4b23-b1b0-44e561782fae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.073957] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.089741] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1168.090140] env[68285]: DEBUG nova.compute.manager [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Instance network_info: |[{"id": "9199e860-a70a-4057-93f0-526a4c8a2ed7", "address": "fa:16:3e:6f:50:a1", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9199e860-a7", "ovs_interfaceid": "9199e860-a70a-4057-93f0-526a4c8a2ed7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1168.090645] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:50:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '82dbbfe2-640b-433f-a8e9-1566bd40fb34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9199e860-a70a-4057-93f0-526a4c8a2ed7', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1168.098518] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1168.099732] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1168.100061] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af4bdcd6-19d0-431d-96a4-10820258b48b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.123088] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1168.123088] env[68285]: value = "task-2892150" [ 1168.123088] env[68285]: _type = "Task" [ 1168.123088] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.135415] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892150, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.182400] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.182812] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.184015] env[68285]: INFO nova.compute.manager [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Attaching volume 1c482eea-25f7-44c7-bf99-c576cabd8ee0 to /dev/sdb [ 1168.228863] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f4fc75d-7381-4006-b8da-15de76ca1b3c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.237721] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a7177c-ebde-4eb2-bfbb-2efb95b76de7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.262636] env[68285]: DEBUG nova.virt.block_device [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Updating existing volume attachment record: c4caf760-bd5c-414a-a71e-58c95fa6fe46 {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1168.374777] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.457s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.375456] env[68285]: DEBUG nova.compute.manager [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1168.379046] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529901d4-03a3-0d69-684b-54391f216750, 'name': SearchDatastore_Task, 'duration_secs': 0.015451} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.379046] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.732s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.379251] env[68285]: DEBUG nova.objects.instance [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lazy-loading 'resources' on Instance uuid d4f20336-9c29-4aac-8c0d-f577749cd7d7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1168.381766] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1168.381766] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1168.385020] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.385020] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.385020] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1168.385020] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d585c7c8-b31a-4a6b-9b50-0e98e692e534 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.395521] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1168.395716] env[68285]: DEBUG nova.virt.vmwareapi.vmops 
[None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1168.396851] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2ab0d28-5fd0-4680-acd5-bcb0ca401f5d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.404665] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Waiting for the task: (returnval){ [ 1168.404665] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f00e50-0276-b9d2-ba88-607ba502df79" [ 1168.404665] env[68285]: _type = "Task" [ 1168.404665] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.414345] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f00e50-0276-b9d2-ba88-607ba502df79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.564967] env[68285]: INFO nova.compute.manager [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Took 32.56 seconds to build instance. [ 1168.634891] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892150, 'name': CreateVM_Task, 'duration_secs': 0.426292} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.635342] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1168.635927] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.636109] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.637398] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1168.637893] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-783a1bd4-2757-4055-8238-12551dbecf7e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.643855] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1168.643855] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52340855-cfcd-30b3-b2eb-87d87f2d947b" [ 1168.643855] env[68285]: _type = "Task" [ 1168.643855] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.658050] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52340855-cfcd-30b3-b2eb-87d87f2d947b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.669298] env[68285]: DEBUG nova.compute.manager [req-6688f2f3-9d3f-454e-8df9-8123f545465f req-b07f80b5-9de3-4ca4-8107-2eb08f668f8d service nova] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Received event network-changed-9199e860-a70a-4057-93f0-526a4c8a2ed7 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1168.669535] env[68285]: DEBUG nova.compute.manager [req-6688f2f3-9d3f-454e-8df9-8123f545465f req-b07f80b5-9de3-4ca4-8107-2eb08f668f8d service nova] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Refreshing instance network info cache due to event network-changed-9199e860-a70a-4057-93f0-526a4c8a2ed7. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1168.669767] env[68285]: DEBUG oslo_concurrency.lockutils [req-6688f2f3-9d3f-454e-8df9-8123f545465f req-b07f80b5-9de3-4ca4-8107-2eb08f668f8d service nova] Acquiring lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.669945] env[68285]: DEBUG oslo_concurrency.lockutils [req-6688f2f3-9d3f-454e-8df9-8123f545465f req-b07f80b5-9de3-4ca4-8107-2eb08f668f8d service nova] Acquired lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.671370] env[68285]: DEBUG nova.network.neutron [req-6688f2f3-9d3f-454e-8df9-8123f545465f req-b07f80b5-9de3-4ca4-8107-2eb08f668f8d service nova] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Refreshing network info cache for port 9199e860-a70a-4057-93f0-526a4c8a2ed7 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1168.768910] env[68285]: DEBUG nova.compute.manager [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1168.769876] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b1b45d-6f4f-49e4-a3f1-fa8f825764b3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.886433] env[68285]: DEBUG nova.compute.utils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1168.887950] env[68285]: DEBUG nova.compute.manager [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1168.888193] env[68285]: DEBUG nova.network.neutron [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1168.919597] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f00e50-0276-b9d2-ba88-607ba502df79, 'name': SearchDatastore_Task, 'duration_secs': 0.012527} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.920643] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f750f26-fb67-4807-912c-e801182d1de6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.928715] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Waiting for the task: (returnval){ [ 1168.928715] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521ac7de-400d-11b7-4e5b-b1420d32780b" [ 1168.928715] env[68285]: _type = "Task" [ 1168.928715] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.943840] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521ac7de-400d-11b7-4e5b-b1420d32780b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.008772] env[68285]: DEBUG nova.policy [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0efa62451164f5da7a68edcf2e35755', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '762914debb0f446eb24029038a944294', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1169.073360] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0aa22257-4f31-43c9-beac-32fd2fb67de8 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "5abddda1-9bf7-4039-81c7-8622f43cc72e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.082s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.158692] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52340855-cfcd-30b3-b2eb-87d87f2d947b, 'name': SearchDatastore_Task, 'duration_secs': 0.016334} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.159144] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.159282] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1169.159496] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.217667] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bab5195-5bb6-4450-8f94-a92569140ca2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.226069] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deceffca-28c6-459e-ac05-f666de1aa9ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.263827] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7525c73e-cdc5-4f53-a67d-50a40579cb20 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.276245] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950265ba-2cce-46d6-a39c-76b73532fbee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.284767] env[68285]: INFO nova.compute.manager [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] instance snapshotting [ 1169.285245] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb5c28b-e34d-4424-9b99-3734ae4af1c6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.314501] env[68285]: DEBUG nova.compute.provider_tree [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1169.317516] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-481aa3d1-9d38-4269-bad4-2fac8b4f4746 
{{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.323440] env[68285]: DEBUG nova.compute.manager [req-ce8ac485-bbf9-4adf-aa49-1bee4c3712fc req-3c45c1fb-666b-45ed-8e55-ea0481a5722f service nova] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Received event network-changed-10900535-c864-4616-a243-0798b3cdb70a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1169.323440] env[68285]: DEBUG nova.compute.manager [req-ce8ac485-bbf9-4adf-aa49-1bee4c3712fc req-3c45c1fb-666b-45ed-8e55-ea0481a5722f service nova] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Refreshing instance network info cache due to event network-changed-10900535-c864-4616-a243-0798b3cdb70a. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1169.323440] env[68285]: DEBUG oslo_concurrency.lockutils [req-ce8ac485-bbf9-4adf-aa49-1bee4c3712fc req-3c45c1fb-666b-45ed-8e55-ea0481a5722f service nova] Acquiring lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.323440] env[68285]: DEBUG oslo_concurrency.lockutils [req-ce8ac485-bbf9-4adf-aa49-1bee4c3712fc req-3c45c1fb-666b-45ed-8e55-ea0481a5722f service nova] Acquired lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.323440] env[68285]: DEBUG nova.network.neutron [req-ce8ac485-bbf9-4adf-aa49-1bee4c3712fc req-3c45c1fb-666b-45ed-8e55-ea0481a5722f service nova] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Refreshing network info cache for port 10900535-c864-4616-a243-0798b3cdb70a {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1169.325352] env[68285]: DEBUG nova.scheduler.client.report [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1169.392680] env[68285]: DEBUG nova.compute.manager [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1169.447021] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521ac7de-400d-11b7-4e5b-b1420d32780b, 'name': SearchDatastore_Task, 'duration_secs': 0.014213} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.449892] env[68285]: DEBUG nova.network.neutron [req-6688f2f3-9d3f-454e-8df9-8123f545465f req-b07f80b5-9de3-4ca4-8107-2eb08f668f8d service nova] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updated VIF entry in instance network info cache for port 9199e860-a70a-4057-93f0-526a4c8a2ed7. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1169.449892] env[68285]: DEBUG nova.network.neutron [req-6688f2f3-9d3f-454e-8df9-8123f545465f req-b07f80b5-9de3-4ca4-8107-2eb08f668f8d service nova] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating instance_info_cache with network_info: [{"id": "9199e860-a70a-4057-93f0-526a4c8a2ed7", "address": "fa:16:3e:6f:50:a1", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9199e860-a7", "ovs_interfaceid": "9199e860-a70a-4057-93f0-526a4c8a2ed7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.449892] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.449892] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 75b9c202-b50d-4c59-b3ef-03e61225a1dc/75b9c202-b50d-4c59-b3ef-03e61225a1dc.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1169.450169] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.450338] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 
tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1169.451310] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ed5b06c-1aac-4523-99a5-21267d7e8d9d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.452443] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3c0821c-9d3e-4d74-af25-1310c3283ceb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.461163] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Waiting for the task: (returnval){ [ 1169.461163] env[68285]: value = "task-2892152" [ 1169.461163] env[68285]: _type = "Task" [ 1169.461163] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.466013] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1169.466213] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1169.467760] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1aa95978-b4aa-473b-94d7-286158723e17 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.473134] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892152, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.477106] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1169.477106] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52096727-e6ec-74ba-6c0d-d8441ad7e964" [ 1169.477106] env[68285]: _type = "Task" [ 1169.477106] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.488237] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52096727-e6ec-74ba-6c0d-d8441ad7e964, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.492702] env[68285]: DEBUG nova.network.neutron [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Successfully created port: 951c2d6f-07b5-4821-b7fb-b15b90849f2a {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1169.832992] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.454s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.835332] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.405s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.835624] env[68285]: DEBUG nova.objects.instance [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lazy-loading 'resources' on Instance uuid 1a040977-b57e-4b67-b259-065b788141de {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1169.839214] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1169.839802] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-976da5c5-1569-4632-8b2b-7dc29eceae03 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.849974] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1169.849974] env[68285]: value = "task-2892153" [ 1169.849974] env[68285]: _type = "Task" [ 1169.849974] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.859718] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892153, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.877433] env[68285]: INFO nova.scheduler.client.report [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Deleted allocations for instance d4f20336-9c29-4aac-8c0d-f577749cd7d7 [ 1169.950977] env[68285]: DEBUG oslo_concurrency.lockutils [req-6688f2f3-9d3f-454e-8df9-8123f545465f req-b07f80b5-9de3-4ca4-8107-2eb08f668f8d service nova] Releasing lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.972367] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892152, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.991092] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52096727-e6ec-74ba-6c0d-d8441ad7e964, 'name': SearchDatastore_Task, 'duration_secs': 0.010212} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.992084] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c52dbcd-f6d8-4c01-b021-20f6ab92f74b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.999601] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1169.999601] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d0e8c0-2b50-9231-ba91-1afd8b74985c" [ 1169.999601] env[68285]: _type = "Task" [ 1169.999601] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.009657] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d0e8c0-2b50-9231-ba91-1afd8b74985c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.088235] env[68285]: DEBUG nova.network.neutron [req-ce8ac485-bbf9-4adf-aa49-1bee4c3712fc req-3c45c1fb-666b-45ed-8e55-ea0481a5722f service nova] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updated VIF entry in instance network info cache for port 10900535-c864-4616-a243-0798b3cdb70a. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1170.088793] env[68285]: DEBUG nova.network.neutron [req-ce8ac485-bbf9-4adf-aa49-1bee4c3712fc req-3c45c1fb-666b-45ed-8e55-ea0481a5722f service nova] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance_info_cache with network_info: [{"id": "10900535-c864-4616-a243-0798b3cdb70a", "address": "fa:16:3e:49:12:6b", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10900535-c8", "ovs_interfaceid": "10900535-c864-4616-a243-0798b3cdb70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.361190] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892153, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.385669] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d097ed31-07f7-4a92-a5c6-6facbd62ef24 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "d4f20336-9c29-4aac-8c0d-f577749cd7d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.427s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.403116] env[68285]: DEBUG nova.compute.manager [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1170.429339] env[68285]: DEBUG nova.virt.hardware [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1170.429586] env[68285]: DEBUG nova.virt.hardware [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1170.430235] env[68285]: DEBUG nova.virt.hardware [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1170.430235] env[68285]: DEBUG nova.virt.hardware [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1170.430235] env[68285]: DEBUG nova.virt.hardware [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1170.430235] env[68285]: DEBUG nova.virt.hardware [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1170.430533] env[68285]: DEBUG nova.virt.hardware [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1170.430668] env[68285]: DEBUG nova.virt.hardware [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1170.430854] 
env[68285]: DEBUG nova.virt.hardware [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1170.430965] env[68285]: DEBUG nova.virt.hardware [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1170.431208] env[68285]: DEBUG nova.virt.hardware [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1170.432455] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee610f62-7377-4a8f-960c-7c2ac7c0162a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.444524] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9320cedb-8dc6-4ce3-95ab-2de92f925bbb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.476686] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892152, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.629775} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.477301] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 75b9c202-b50d-4c59-b3ef-03e61225a1dc/75b9c202-b50d-4c59-b3ef-03e61225a1dc.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1170.477301] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1170.477602] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7c8a172-5e07-427e-837f-aab7a7f292b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.485814] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Waiting for the task: (returnval){ [ 1170.485814] env[68285]: value = "task-2892154" [ 1170.485814] env[68285]: _type = "Task" [ 1170.485814] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.499441] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892154, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.510991] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d0e8c0-2b50-9231-ba91-1afd8b74985c, 'name': SearchDatastore_Task, 'duration_secs': 0.06464} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.513765] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.514030] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] fe8e0a71-e9b0-4035-a696-51455d6fc473/fe8e0a71-e9b0-4035-a696-51455d6fc473.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1170.514477] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15617ef1-686c-40fe-9d80-fa79510f6e0a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.523083] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1170.523083] env[68285]: value = "task-2892155" [ 1170.523083] env[68285]: _type = "Task" [ 1170.523083] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.534534] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892155, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.591967] env[68285]: DEBUG oslo_concurrency.lockutils [req-ce8ac485-bbf9-4adf-aa49-1bee4c3712fc req-3c45c1fb-666b-45ed-8e55-ea0481a5722f service nova] Releasing lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.654940] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0236a6e8-5a15-4b78-83a6-f10116118a57 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.664107] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef267a87-f659-40b4-8a3f-22a8ffd6c7d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.698876] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8dbc72f-64c1-4b66-9fea-34065a852c5f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.707750] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758a4811-c79b-43ac-b8c2-8d97cd24ee6e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.722187] env[68285]: DEBUG nova.compute.provider_tree [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1170.863457] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892153, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.995937] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892154, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.136569} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.996320] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1170.997215] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2532d731-8fc3-4226-8a43-ef05dc9ac665 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.035486] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 75b9c202-b50d-4c59-b3ef-03e61225a1dc/75b9c202-b50d-4c59-b3ef-03e61225a1dc.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1171.036000] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d9950b7-f8ec-403a-85c2-8f449c441e14 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.060897] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892155, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.062704] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Waiting for the task: (returnval){ [ 1171.062704] env[68285]: value = "task-2892157" [ 1171.062704] env[68285]: _type = "Task" [ 1171.062704] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.070466] env[68285]: DEBUG nova.network.neutron [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Successfully updated port: 951c2d6f-07b5-4821-b7fb-b15b90849f2a {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1171.077741] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892157, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.225386] env[68285]: DEBUG nova.scheduler.client.report [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1171.336347] env[68285]: DEBUG nova.compute.manager [req-12a2ee86-9ae2-4109-8df4-dd6c3ab52aff req-ffaeece4-cb70-4da6-acbf-b5e2096d3640 service nova] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Received event network-vif-plugged-951c2d6f-07b5-4821-b7fb-b15b90849f2a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1171.336643] env[68285]: DEBUG oslo_concurrency.lockutils [req-12a2ee86-9ae2-4109-8df4-dd6c3ab52aff req-ffaeece4-cb70-4da6-acbf-b5e2096d3640 service nova] Acquiring lock "08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.337422] env[68285]: DEBUG oslo_concurrency.lockutils [req-12a2ee86-9ae2-4109-8df4-dd6c3ab52aff req-ffaeece4-cb70-4da6-acbf-b5e2096d3640 service nova] Lock "08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.337512] env[68285]: DEBUG oslo_concurrency.lockutils [req-12a2ee86-9ae2-4109-8df4-dd6c3ab52aff req-ffaeece4-cb70-4da6-acbf-b5e2096d3640 service nova] Lock "08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.337674] env[68285]: DEBUG nova.compute.manager [req-12a2ee86-9ae2-4109-8df4-dd6c3ab52aff req-ffaeece4-cb70-4da6-acbf-b5e2096d3640 service nova] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] No waiting events found dispatching network-vif-plugged-951c2d6f-07b5-4821-b7fb-b15b90849f2a {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1171.337972] env[68285]: WARNING nova.compute.manager [req-12a2ee86-9ae2-4109-8df4-dd6c3ab52aff req-ffaeece4-cb70-4da6-acbf-b5e2096d3640 service nova] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Received unexpected event network-vif-plugged-951c2d6f-07b5-4821-b7fb-b15b90849f2a for instance with vm_state building and task_state spawning. 
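[editor's note] The records above show the scheduler report client deciding that the locally computed inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b matches what the ProviderTree already holds ("Inventory has not changed ..."). The snippet below is only a minimal sketch of that kind of dict comparison, with the numbers copied verbatim from the logged inventory; it is not Nova's actual implementation (which lives in nova/compute/provider_tree.py and nova/scheduler/client/report.py).

```python
# Illustrative only: compare a freshly computed inventory dict against the
# cached one, in the spirit of the "Inventory has not changed" check above.
def inventory_changed(current, new):
    """Return True if any resource class or any of its fields differ."""
    if set(current) != set(new):
        return True
    return any(current[rc] != new[rc] for rc in new)

reported = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162,
                'step_size': 1, 'allocation_ratio': 1.0},
}

# An identical dict leaves the provider untouched, matching the log message;
# a later change (e.g. DISK_GB max_unit 162 -> 161) would trigger an update.
assert inventory_changed(reported, dict(reported)) is False
```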
[ 1171.338049] env[68285]: DEBUG nova.compute.manager [req-12a2ee86-9ae2-4109-8df4-dd6c3ab52aff req-ffaeece4-cb70-4da6-acbf-b5e2096d3640 service nova] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Received event network-changed-951c2d6f-07b5-4821-b7fb-b15b90849f2a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1171.338158] env[68285]: DEBUG nova.compute.manager [req-12a2ee86-9ae2-4109-8df4-dd6c3ab52aff req-ffaeece4-cb70-4da6-acbf-b5e2096d3640 service nova] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Refreshing instance network info cache due to event network-changed-951c2d6f-07b5-4821-b7fb-b15b90849f2a. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1171.338340] env[68285]: DEBUG oslo_concurrency.lockutils [req-12a2ee86-9ae2-4109-8df4-dd6c3ab52aff req-ffaeece4-cb70-4da6-acbf-b5e2096d3640 service nova] Acquiring lock "refresh_cache-08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.338501] env[68285]: DEBUG oslo_concurrency.lockutils [req-12a2ee86-9ae2-4109-8df4-dd6c3ab52aff req-ffaeece4-cb70-4da6-acbf-b5e2096d3640 service nova] Acquired lock "refresh_cache-08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.338719] env[68285]: DEBUG nova.network.neutron [req-12a2ee86-9ae2-4109-8df4-dd6c3ab52aff req-ffaeece4-cb70-4da6-acbf-b5e2096d3640 service nova] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Refreshing network info cache for port 951c2d6f-07b5-4821-b7fb-b15b90849f2a {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1171.362757] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892153, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.547512] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892155, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.572368] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Acquiring lock "refresh_cache-08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.577184] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892157, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.730602] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.895s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.733074] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.971s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.733284] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.733446] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68285) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1171.733830] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.499s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.733997] env[68285]: DEBUG nova.objects.instance [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lazy-loading 'resources' on Instance uuid ce780600-5dc9-4a60-b54e-415cd1766ffb {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1171.735781] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaee8caa-1fa5-4590-adf3-9a16cb8ef037 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.745877] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb29495-d608-4d28-9bdb-19730a241551 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.767514] env[68285]: INFO nova.scheduler.client.report [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Deleted allocations for instance 1a040977-b57e-4b67-b259-065b788141de [ 1171.769583] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c416ae-22c2-4901-b4b0-e14c6dd402c1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.786366] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-675e3164-aff3-4a02-ab8a-b9ee7cef1047 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.822259] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179453MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=68285) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1171.822484] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.865597] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892153, 'name': CreateSnapshot_Task, 'duration_secs': 1.978493} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.866498] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1171.867483] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21da1c5e-cadf-476e-bb6d-42d42e3ec41f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.891036] env[68285]: DEBUG nova.network.neutron [req-12a2ee86-9ae2-4109-8df4-dd6c3ab52aff req-ffaeece4-cb70-4da6-acbf-b5e2096d3640 service nova] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1172.016811] env[68285]: DEBUG nova.network.neutron [req-12a2ee86-9ae2-4109-8df4-dd6c3ab52aff req-ffaeece4-cb70-4da6-acbf-b5e2096d3640 service nova] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.048045] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892155, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.075327] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892157, 'name': ReconfigVM_Task, 'duration_secs': 0.705919} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.075593] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 75b9c202-b50d-4c59-b3ef-03e61225a1dc/75b9c202-b50d-4c59-b3ef-03e61225a1dc.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1172.076330] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70a94221-2bdc-4f09-acc3-9f045d8a2472 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.085613] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Waiting for the task: (returnval){ [ 1172.085613] env[68285]: value = "task-2892158" [ 1172.085613] env[68285]: _type = "Task" [ 1172.085613] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.095304] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892158, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.284955] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0b3fb79-6113-4ae5-804c-380394caff6e tempest-ServerShowV247Test-1890882288 tempest-ServerShowV247Test-1890882288-project-member] Lock "1a040977-b57e-4b67-b259-065b788141de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.066s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.388314] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1172.391519] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-468bb79d-4e56-4cd0-981f-4572aa26e6d4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.400753] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1172.400753] env[68285]: value = "task-2892159" [ 1172.400753] env[68285]: _type = "Task" [ 1172.400753] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.411621] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892159, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.506427] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5eafe2f-7f34-40d4-8cac-0744dc6c2913 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.516533] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06a0883-fbd7-4e73-a660-209a7d7e07b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.522278] env[68285]: DEBUG oslo_concurrency.lockutils [req-12a2ee86-9ae2-4109-8df4-dd6c3ab52aff req-ffaeece4-cb70-4da6-acbf-b5e2096d3640 service nova] Releasing lock "refresh_cache-08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.523858] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Acquired lock "refresh_cache-08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.523858] env[68285]: DEBUG nova.network.neutron [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1172.563279] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8d0637-1e72-4654-bb82-77a04f86d592 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.580298] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892155, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.745406} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.581205] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] fe8e0a71-e9b0-4035-a696-51455d6fc473/fe8e0a71-e9b0-4035-a696-51455d6fc473.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1172.581479] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1172.585651] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f5f299-9b4c-44d7-8198-d5b80032116b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.589765] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4561ea51-639e-42b6-a3ef-455bb3bd3bb5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.607328] env[68285]: DEBUG nova.compute.provider_tree [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1172.613592] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892158, 'name': Rename_Task, 'duration_secs': 0.292541} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.613986] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1172.613986] env[68285]: value = "task-2892160" [ 1172.613986] env[68285]: _type = "Task" [ 1172.613986] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.617254] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1172.617254] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df2582cf-a0b8-469a-a621-2367923f9490 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.632461] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892160, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.633523] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Waiting for the task: (returnval){ [ 1172.633523] env[68285]: value = "task-2892161" [ 1172.633523] env[68285]: _type = "Task" [ 1172.633523] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.644666] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892161, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.825874] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Volume attach. 
Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1172.826159] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581024', 'volume_id': '1c482eea-25f7-44c7-bf99-c576cabd8ee0', 'name': 'volume-1c482eea-25f7-44c7-bf99-c576cabd8ee0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7790f1e6-c73f-40d6-97af-00e9c518a09c', 'attached_at': '', 'detached_at': '', 'volume_id': '1c482eea-25f7-44c7-bf99-c576cabd8ee0', 'serial': '1c482eea-25f7-44c7-bf99-c576cabd8ee0'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1172.827567] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072fb097-c9c7-431d-a886-7195a5901288 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.847499] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff3b251-e059-44f5-81e3-4d74c5ed3ba5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.878507] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] volume-1c482eea-25f7-44c7-bf99-c576cabd8ee0/volume-1c482eea-25f7-44c7-bf99-c576cabd8ee0.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1172.878507] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a6d10a2-f44c-4066-8618-e47d45179f31 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.899504] env[68285]: DEBUG oslo_vmware.api [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1172.899504] env[68285]: value = "task-2892162" [ 1172.899504] env[68285]: _type = "Task" [ 1172.899504] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.914443] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892159, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.918053] env[68285]: DEBUG oslo_vmware.api [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892162, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.058715] env[68285]: DEBUG nova.network.neutron [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1173.128243] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892160, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092453} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.131281] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1173.132241] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92002c31-8be6-4ef9-9392-dc08c26bd057 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.136346] env[68285]: ERROR nova.scheduler.client.report [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [req-c7e7eb95-7924-4a66-b5c7-eb97a4616098] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c7e7eb95-7924-4a66-b5c7-eb97a4616098"}]} [ 1173.149081] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892161, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.167624] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] fe8e0a71-e9b0-4035-a696-51455d6fc473/fe8e0a71-e9b0-4035-a696-51455d6fc473.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1173.168905] env[68285]: DEBUG nova.scheduler.client.report [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1173.170915] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-241fbd97-092f-4706-b6fd-2fc14d694708 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.196180] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1173.196180] env[68285]: value = "task-2892163" [ 1173.196180] env[68285]: _type = "Task" [ 1173.196180] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.200393] env[68285]: DEBUG nova.scheduler.client.report [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1173.200650] env[68285]: DEBUG nova.compute.provider_tree [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1173.209506] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892163, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.220418] env[68285]: DEBUG nova.scheduler.client.report [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1173.232661] env[68285]: DEBUG nova.network.neutron [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Updating instance_info_cache with network_info: [{"id": "951c2d6f-07b5-4821-b7fb-b15b90849f2a", "address": "fa:16:3e:1e:d4:7a", "network": {"id": "e9f9f294-50d7-49be-aa34-1cad5162d362", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-166381133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "762914debb0f446eb24029038a944294", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5291d0-ee0f-4d70-b2ae-ab6879a67b08", "external-id": "nsx-vlan-transportzone-597", "segmentation_id": 597, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap951c2d6f-07", "ovs_interfaceid": "951c2d6f-07b5-4821-b7fb-b15b90849f2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.239507] env[68285]: DEBUG nova.scheduler.client.report [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1173.339497] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "68aee959-4168-43a7-a8d1-e6e126a52da5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.339763] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "68aee959-4168-43a7-a8d1-e6e126a52da5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1173.420099] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892159, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.420522] env[68285]: DEBUG oslo_vmware.api [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.538057] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3092352-7b06-4891-87ad-8d8bc8212880 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.549121] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e9e299-c6e4-4b43-a93c-30fd3fcf3d56 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.581161] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb84f538-10ab-4429-a7dd-e46fed6b10e6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.590271] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e2b893-afae-4e81-a390-3cb35c4497e2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.606275] env[68285]: DEBUG nova.compute.provider_tree [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1173.648041] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892161, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.708030] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892163, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.735952] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Releasing lock "refresh_cache-08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1173.736447] env[68285]: DEBUG nova.compute.manager [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Instance network_info: |[{"id": "951c2d6f-07b5-4821-b7fb-b15b90849f2a", "address": "fa:16:3e:1e:d4:7a", "network": {"id": "e9f9f294-50d7-49be-aa34-1cad5162d362", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-166381133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "762914debb0f446eb24029038a944294", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5291d0-ee0f-4d70-b2ae-ab6879a67b08", "external-id": "nsx-vlan-transportzone-597", "segmentation_id": 597, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap951c2d6f-07", "ovs_interfaceid": "951c2d6f-07b5-4821-b7fb-b15b90849f2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1173.736939] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:d4:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5291d0-ee0f-4d70-b2ae-ab6879a67b08', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '951c2d6f-07b5-4821-b7fb-b15b90849f2a', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1173.746275] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Creating folder: Project (762914debb0f446eb24029038a944294). Parent ref: group-v580775. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1173.747100] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7be52041-011e-4f2c-af87-daa699c76476 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.759988] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Created folder: Project (762914debb0f446eb24029038a944294) in parent group-v580775. [ 1173.760096] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Creating folder: Instances. Parent ref: group-v581027. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1173.760467] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3975a13a-e5e8-47dc-9adf-03ca2febd030 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.771940] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Created folder: Instances in parent group-v581027. [ 1173.772244] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1173.772449] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1173.772661] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7b42935-334b-42ca-b469-40fc02912787 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.793551] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1173.793551] env[68285]: value = "task-2892166" [ 1173.793551] env[68285]: _type = "Task" [ 1173.793551] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.803261] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892166, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.843597] env[68285]: DEBUG nova.compute.manager [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1173.920939] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892159, 'name': CloneVM_Task} progress is 95%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.921325] env[68285]: DEBUG oslo_vmware.api [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892162, 'name': ReconfigVM_Task, 'duration_secs': 0.903208} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.921527] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Reconfigured VM instance instance-00000052 to attach disk [datastore2] volume-1c482eea-25f7-44c7-bf99-c576cabd8ee0/volume-1c482eea-25f7-44c7-bf99-c576cabd8ee0.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1173.926962] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5bf6ec8-ff77-49a3-b139-7604d6a41951 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.954126] env[68285]: DEBUG oslo_vmware.api [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1173.954126] env[68285]: value = "task-2892167" [ 1173.954126] env[68285]: _type = "Task" [ 1173.954126] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.969753] env[68285]: DEBUG oslo_vmware.api [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892167, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.149501] env[68285]: DEBUG oslo_vmware.api [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892161, 'name': PowerOnVM_Task, 'duration_secs': 1.222066} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.149929] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1174.150253] env[68285]: INFO nova.compute.manager [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Took 9.52 seconds to spawn the instance on the hypervisor. 
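[editor's note] The recurring "Waiting for the task: (returnval){ value = task-... }" blocks and the "Task: {...} progress is N%" lines above come from a poll loop around vCenter task objects (oslo_vmware/api.py, wait_for_task/_poll_task). The sketch below is a generic stand-in for that pattern, assuming a caller-supplied get_task_info callable; it is not oslo.vmware's API and the state/field names are simplified for illustration.

```python
# Illustrative sketch of the task-polling pattern seen in the log above.
import time


class TaskError(Exception):
    """Raised when the polled task reports a terminal error state."""


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a task-info callable until the task succeeds or errors out."""
    while True:
        info = get_task_info()          # assumed to return a dict snapshot
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise TaskError(info.get('error', 'task failed'))
        # Still queued/running: report progress, then poll again,
        # mirroring the periodic "progress is N%" lines in the log.
        print("Task %s progress is %s%%"
              % (info.get('name', '?'), info.get('progress', 0)))
        time.sleep(poll_interval)
```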
[ 1174.150510] env[68285]: DEBUG nova.compute.manager [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1174.151377] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add71047-e3cd-4b80-9b61-64d777240a82 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.168968] env[68285]: DEBUG nova.scheduler.client.report [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 126 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1174.169255] env[68285]: DEBUG nova.compute.provider_tree [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 126 to 127 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1174.169434] env[68285]: DEBUG nova.compute.provider_tree [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1174.209286] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892163, 'name': ReconfigVM_Task, 'duration_secs': 0.668509} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.209588] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Reconfigured VM instance instance-0000005b to attach disk [datastore2] fe8e0a71-e9b0-4035-a696-51455d6fc473/fe8e0a71-e9b0-4035-a696-51455d6fc473.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1174.210363] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa64e27f-4edf-46a0-a624-19d52e9db191 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.218544] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1174.218544] env[68285]: value = "task-2892168" [ 1174.218544] env[68285]: _type = "Task" [ 1174.218544] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.229966] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892168, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.306805] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892166, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.368117] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.420318] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892159, 'name': CloneVM_Task, 'duration_secs': 1.749576} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.420664] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Created linked-clone VM from snapshot [ 1174.421474] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d8fb0a-3c46-4b42-9317-cb1a59936d6c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.429892] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Uploading image 6a6aeaf1-3b71-4ca7-af37-384a8a742447 {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1174.455747] env[68285]: DEBUG oslo_vmware.rw_handles [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1174.455747] env[68285]: value = "vm-581026" [ 1174.455747] env[68285]: _type = "VirtualMachine" [ 1174.455747] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1174.456208] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-dc6fd2b7-d51b-49f2-b0b1-f580d5ef0763 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.469202] env[68285]: DEBUG oslo_vmware.api [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892167, 'name': ReconfigVM_Task, 'duration_secs': 0.262502} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.470922] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581024', 'volume_id': '1c482eea-25f7-44c7-bf99-c576cabd8ee0', 'name': 'volume-1c482eea-25f7-44c7-bf99-c576cabd8ee0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7790f1e6-c73f-40d6-97af-00e9c518a09c', 'attached_at': '', 'detached_at': '', 'volume_id': '1c482eea-25f7-44c7-bf99-c576cabd8ee0', 'serial': '1c482eea-25f7-44c7-bf99-c576cabd8ee0'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1174.472451] env[68285]: DEBUG oslo_vmware.rw_handles [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lease: (returnval){ [ 1174.472451] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5209fb8d-fc90-c684-1451-c9b66ac4e92b" [ 1174.472451] env[68285]: _type = "HttpNfcLease" [ 1174.472451] env[68285]: } obtained for exporting VM: (result){ [ 1174.472451] env[68285]: value = "vm-581026" [ 1174.472451] env[68285]: _type = "VirtualMachine" [ 1174.472451] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1174.472715] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the lease: (returnval){ [ 1174.472715] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5209fb8d-fc90-c684-1451-c9b66ac4e92b" [ 1174.472715] env[68285]: _type = "HttpNfcLease" [ 1174.472715] env[68285]: } to be ready. {{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1174.480185] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1174.480185] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5209fb8d-fc90-c684-1451-c9b66ac4e92b" [ 1174.480185] env[68285]: _type = "HttpNfcLease" [ 1174.480185] env[68285]: } is initializing. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1174.670124] env[68285]: INFO nova.compute.manager [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Took 35.28 seconds to build instance. 
[ 1174.678156] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.944s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.683480] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.321s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.694627] env[68285]: DEBUG nova.objects.instance [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1174.731926] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892168, 'name': Rename_Task, 'duration_secs': 0.218756} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.732739] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1174.733252] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24f9ddf3-637c-481f-bc9a-7eb5c8d89ff8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.745182] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1174.745182] env[68285]: value = "task-2892170" [ 1174.745182] env[68285]: _type = "Task" [ 1174.745182] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.749244] env[68285]: INFO nova.scheduler.client.report [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleted allocations for instance ce780600-5dc9-4a60-b54e-415cd1766ffb [ 1174.757554] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892170, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.817918] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892166, 'name': CreateVM_Task, 'duration_secs': 0.582284} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.818179] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1174.819145] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.819406] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1174.819842] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1174.822651] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22bb3c45-57b4-4656-9fd1-8866555c18c1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.829833] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Waiting for the task: (returnval){ [ 1174.829833] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524437be-4b7b-a380-788f-faa6f1bb5d55" [ 1174.829833] env[68285]: _type = "Task" [ 1174.829833] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.846778] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524437be-4b7b-a380-788f-faa6f1bb5d55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.996859] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1174.996859] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5209fb8d-fc90-c684-1451-c9b66ac4e92b" [ 1174.996859] env[68285]: _type = "HttpNfcLease" [ 1174.996859] env[68285]: } is ready. 
{{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1174.997278] env[68285]: DEBUG oslo_vmware.rw_handles [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1174.997278] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5209fb8d-fc90-c684-1451-c9b66ac4e92b" [ 1174.997278] env[68285]: _type = "HttpNfcLease" [ 1174.997278] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1174.998474] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a359c5-f3f1-450a-8f31-8c4d1fce5000 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.027670] env[68285]: DEBUG oslo_vmware.rw_handles [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a78928-2ee2-6f94-2ac7-ae660d24eaf2/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1175.027670] env[68285]: DEBUG oslo_vmware.rw_handles [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a78928-2ee2-6f94-2ac7-ae660d24eaf2/disk-0.vmdk for reading. {{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1175.147776] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-076529b2-0e16-43d4-a9b0-4f47fbbb5780 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.173771] env[68285]: DEBUG oslo_concurrency.lockutils [None req-23d1f2d4-fd74-4958-86b0-f9fd9c736bca tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Lock "75b9c202-b50d-4c59-b3ef-03e61225a1dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.792s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.255689] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892170, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.262049] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a17454b7-4b8e-40ef-869c-71db103892c9 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "ce780600-5dc9-4a60-b54e-415cd1766ffb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.602s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.347077] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524437be-4b7b-a380-788f-faa6f1bb5d55, 'name': SearchDatastore_Task, 'duration_secs': 0.014569} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.347469] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1175.347761] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1175.348065] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.348274] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.348459] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1175.348781] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-698c0597-7ba3-40ac-8039-484b1a204c84 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.364349] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 
tempest-AttachInterfacesV270Test-2099827642-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1175.364902] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1175.366045] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d119ecb7-205b-4b0c-9f49-a321fa9a70e0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.371051] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Acquiring lock "75b9c202-b50d-4c59-b3ef-03e61225a1dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.371208] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Lock "75b9c202-b50d-4c59-b3ef-03e61225a1dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.371889] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Acquiring lock "75b9c202-b50d-4c59-b3ef-03e61225a1dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.372232] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Lock "75b9c202-b50d-4c59-b3ef-03e61225a1dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.372441] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Lock "75b9c202-b50d-4c59-b3ef-03e61225a1dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.375341] env[68285]: INFO nova.compute.manager [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Terminating instance [ 1175.379355] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 
tempest-AttachInterfacesV270Test-2099827642-project-member] Waiting for the task: (returnval){ [ 1175.379355] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521f196b-ec70-a777-3ecb-08b245316091" [ 1175.379355] env[68285]: _type = "Task" [ 1175.379355] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.393985] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521f196b-ec70-a777-3ecb-08b245316091, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.595727] env[68285]: DEBUG nova.objects.instance [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lazy-loading 'flavor' on Instance uuid 7790f1e6-c73f-40d6-97af-00e9c518a09c {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1175.696398] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ede44ac4-453a-4047-b839-07435addd256 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.697637] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.642s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.699431] env[68285]: INFO nova.compute.claims [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1175.758562] env[68285]: DEBUG oslo_vmware.api [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892170, 'name': PowerOnVM_Task, 'duration_secs': 0.644351} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.759128] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1175.759442] env[68285]: INFO nova.compute.manager [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Took 9.73 seconds to spawn the instance on the hypervisor. 
[ 1175.759733] env[68285]: DEBUG nova.compute.manager [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1175.760709] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fef897-88a3-4015-a813-3de3593dd950 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.880836] env[68285]: DEBUG nova.compute.manager [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1175.881089] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1175.882177] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05f52a3-8564-44b0-a531-a922e8ca6457 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.888922] env[68285]: DEBUG oslo_concurrency.lockutils [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "d0b04097-292a-47e7-8f14-199b1650dc2c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.889310] env[68285]: DEBUG oslo_concurrency.lockutils [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "d0b04097-292a-47e7-8f14-199b1650dc2c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.889573] env[68285]: DEBUG oslo_concurrency.lockutils [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "d0b04097-292a-47e7-8f14-199b1650dc2c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.889815] env[68285]: DEBUG oslo_concurrency.lockutils [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "d0b04097-292a-47e7-8f14-199b1650dc2c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.890133] env[68285]: DEBUG oslo_concurrency.lockutils [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 
tempest-ImagesTestJSON-1472763889-project-member] Lock "d0b04097-292a-47e7-8f14-199b1650dc2c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.892980] env[68285]: INFO nova.compute.manager [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Terminating instance [ 1175.900658] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521f196b-ec70-a777-3ecb-08b245316091, 'name': SearchDatastore_Task, 'duration_secs': 0.014303} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.901074] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1175.903015] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-204ed6f0-57a2-4e44-b5ee-c3f2c0a6cabd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.905139] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6433d6f1-544b-4c1e-b256-9474d8bf1985 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.912552] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Waiting for the task: (returnval){ [ 1175.912552] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523abbfb-1327-d67a-a26f-e2c6e8edb6cb" [ 1175.912552] env[68285]: _type = "Task" [ 1175.912552] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.914787] env[68285]: DEBUG oslo_vmware.api [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Waiting for the task: (returnval){ [ 1175.914787] env[68285]: value = "task-2892171" [ 1175.914787] env[68285]: _type = "Task" [ 1175.914787] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.926523] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523abbfb-1327-d67a-a26f-e2c6e8edb6cb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.930097] env[68285]: DEBUG oslo_vmware.api [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892171, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.101535] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a3ed27e-19a7-4036-9c70-8b0058541296 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.919s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.280835] env[68285]: INFO nova.compute.manager [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Took 35.10 seconds to build instance. [ 1176.406826] env[68285]: DEBUG nova.compute.manager [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1176.407783] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1176.409657] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e42ceb7-7ca4-4db0-9372-ff8be321ef5e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.422048] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1176.425838] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9bea182a-72bc-4bae-893b-959fde42d807 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.437231] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523abbfb-1327-d67a-a26f-e2c6e8edb6cb, 'name': SearchDatastore_Task, 'duration_secs': 0.015718} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.437231] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1176.437231] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d/08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1176.437231] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1fab4e5-c862-4b98-9b0c-e3ba06189f21 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.442757] env[68285]: DEBUG oslo_vmware.api [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1176.442757] env[68285]: value = "task-2892172" [ 1176.442757] env[68285]: _type = "Task" [ 1176.442757] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.443885] env[68285]: DEBUG oslo_vmware.api [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892171, 'name': PowerOffVM_Task, 'duration_secs': 0.360628} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.443885] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1176.444385] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1176.447155] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a74633b-ab29-46a6-991f-614a66bf87ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.449643] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Waiting for the task: (returnval){ [ 1176.449643] env[68285]: value = "task-2892173" [ 1176.449643] env[68285]: _type = "Task" [ 1176.449643] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.462120] env[68285]: DEBUG oslo_vmware.api [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892172, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.466432] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "ef87ff30-ef45-4abb-8696-d5493572703a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.466766] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "ef87ff30-ef45-4abb-8696-d5493572703a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.473567] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892173, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.556524] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1176.556524] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1176.556524] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Deleting the datastore file [datastore2] 75b9c202-b50d-4c59-b3ef-03e61225a1dc {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1176.556524] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db24c19d-6b09-4310-a858-c7ee36b768f2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.565833] env[68285]: DEBUG oslo_vmware.api [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Waiting for the task: (returnval){ [ 1176.565833] env[68285]: value = "task-2892175" [ 1176.565833] env[68285]: _type = "Task" [ 1176.565833] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.574775] env[68285]: DEBUG oslo_vmware.api [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892175, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.774977] env[68285]: DEBUG oslo_vmware.rw_handles [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521374a3-6471-1744-8ddc-cb7e78178f8d/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1176.776416] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c113abf6-5d2b-452b-a3ed-e81b152641b2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.782881] env[68285]: DEBUG oslo_concurrency.lockutils [None req-597e9dbb-ad7c-49d8-8a0a-cda1e564f816 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "fe8e0a71-e9b0-4035-a696-51455d6fc473" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.619s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.786927] env[68285]: DEBUG oslo_vmware.rw_handles [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521374a3-6471-1744-8ddc-cb7e78178f8d/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1176.787105] env[68285]: ERROR oslo_vmware.rw_handles [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521374a3-6471-1744-8ddc-cb7e78178f8d/disk-0.vmdk due to incomplete transfer. [ 1176.787450] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-604b92ce-e738-4655-bf57-ee65fea594fe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.795404] env[68285]: DEBUG oslo_vmware.rw_handles [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521374a3-6471-1744-8ddc-cb7e78178f8d/disk-0.vmdk. 
{{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1176.795730] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Uploaded image ba8823bf-179d-43d4-8712-d66dd79f84da to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1176.798464] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1176.802084] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-872028a8-2008-48da-86e0-67ab301bdefe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.809920] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1176.809920] env[68285]: value = "task-2892176" [ 1176.809920] env[68285]: _type = "Task" [ 1176.809920] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.823521] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892176, 'name': Destroy_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.958175] env[68285]: DEBUG oslo_vmware.api [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892172, 'name': PowerOffVM_Task, 'duration_secs': 0.269163} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.958955] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1176.959182] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1176.959501] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e7de8d8-26c4-45ba-8582-4722c31cb3d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.965154] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892173, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.975093] env[68285]: DEBUG nova.compute.manager [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1177.022233] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbde71b-a595-43ba-b083-f0f7bc309c06 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.030073] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1817d4cb-7bf6-4976-8a66-a76c4618604f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.066427] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169d70ce-508d-404c-af62-94651e6f713e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.073442] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1177.073645] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1177.073914] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleting the datastore file [datastore2] 
d0b04097-292a-47e7-8f14-199b1650dc2c {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1177.077779] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b541718-be4e-4eb5-8d66-fb40ee23d167 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.089354] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d6cde0-9479-4e26-95c7-9963799d559a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.094844] env[68285]: DEBUG oslo_vmware.api [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Task: {'id': task-2892175, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.457621} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.096536] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1177.096640] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1177.096808] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1177.097080] env[68285]: INFO nova.compute.manager [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1177.097442] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1177.097811] env[68285]: DEBUG oslo_vmware.api [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1177.097811] env[68285]: value = "task-2892178" [ 1177.097811] env[68285]: _type = "Task" [ 1177.097811] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.098702] env[68285]: DEBUG nova.compute.manager [-] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1177.098842] env[68285]: DEBUG nova.network.neutron [-] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1177.113467] env[68285]: DEBUG nova.compute.provider_tree [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1177.120752] env[68285]: DEBUG oslo_vmware.api [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892178, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.320739] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892176, 'name': Destroy_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.461020] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892173, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.568714} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.462743] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d/08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1177.462990] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1177.463452] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-614e3951-9343-4441-aed3-5ab04ef1633a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.471087] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Waiting for the task: (returnval){ [ 1177.471087] env[68285]: value = "task-2892179" [ 1177.471087] env[68285]: _type = "Task" [ 1177.471087] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.484403] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892179, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.486957] env[68285]: DEBUG nova.compute.manager [req-3e10fcbe-537a-4473-9a27-7ff872538180 req-8c5f14ec-531b-4239-ad9b-44cbd7d5e679 service nova] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Received event network-vif-deleted-3dffbc03-2bda-47c0-b305-c3f2e1e519bf {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1177.487198] env[68285]: INFO nova.compute.manager [req-3e10fcbe-537a-4473-9a27-7ff872538180 req-8c5f14ec-531b-4239-ad9b-44cbd7d5e679 service nova] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Neutron deleted interface 3dffbc03-2bda-47c0-b305-c3f2e1e519bf; detaching it from the instance and deleting it from the info cache [ 1177.487381] env[68285]: DEBUG nova.network.neutron [req-3e10fcbe-537a-4473-9a27-7ff872538180 req-8c5f14ec-531b-4239-ad9b-44cbd7d5e679 service nova] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.506823] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.579130] env[68285]: DEBUG nova.compute.manager [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Stashing vm_state: active {{(pid=68285) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1177.614529] env[68285]: DEBUG oslo_vmware.api [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892178, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186957} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.614935] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1177.615082] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1177.615263] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1177.615474] env[68285]: INFO nova.compute.manager [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1177.615660] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1177.618647] env[68285]: DEBUG nova.compute.manager [-] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1177.618748] env[68285]: DEBUG nova.network.neutron [-] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1177.638705] env[68285]: ERROR nova.scheduler.client.report [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [req-084dd269-9841-47f0-a2b3-0dbd1c3a3807] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-084dd269-9841-47f0-a2b3-0dbd1c3a3807"}]} [ 1177.657302] env[68285]: DEBUG nova.scheduler.client.report [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1177.671958] env[68285]: DEBUG nova.scheduler.client.report [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1177.672212] env[68285]: DEBUG nova.compute.provider_tree [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1177.676760] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.676981] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.691222] env[68285]: DEBUG nova.scheduler.client.report [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1177.711189] 
env[68285]: DEBUG nova.scheduler.client.report [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1177.822871] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892176, 'name': Destroy_Task, 'duration_secs': 0.511967} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.822871] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Destroyed the VM [ 1177.822871] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1177.823148] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3a5cef64-577d-4969-ac39-517430c6ca3e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.830432] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1177.830432] env[68285]: value = "task-2892180" [ 1177.830432] env[68285]: _type = "Task" [ 1177.830432] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.840196] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892180, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.964076] env[68285]: DEBUG nova.network.neutron [-] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.985933] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892179, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085248} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.986313] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1177.987553] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91eeb13-7520-49c0-b83b-2c1a9c3ff392 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.995184] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b08803dd-c2f2-4dbe-8c84-b5169fb9db7f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.016548] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d/08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1178.019958] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2613204a-9639-4db1-b465-6c06c2d396d3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.040149] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224428de-d378-4907-b7a5-fdf0ba6bb820 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.053138] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658475d6-28f9-4a22-91d5-3e0743a5ccdf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.055925] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Waiting for the task: (returnval){ [ 1178.055925] env[68285]: value = "task-2892181" [ 1178.055925] env[68285]: _type = "Task" [ 1178.055925] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.063465] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bcae3ca-68f2-4d7e-96b2-55891e10eff0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.084035] env[68285]: DEBUG nova.compute.manager [req-3e10fcbe-537a-4473-9a27-7ff872538180 req-8c5f14ec-531b-4239-ad9b-44cbd7d5e679 service nova] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Detach interface failed, port_id=3dffbc03-2bda-47c0-b305-c3f2e1e519bf, reason: Instance 75b9c202-b50d-4c59-b3ef-03e61225a1dc could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1178.084722] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892181, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.117729] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.118577] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60cfb970-571e-4701-913b-47a559628a71 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.126359] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad78e37-0774-4db2-9582-15d9d3888735 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.141061] env[68285]: DEBUG nova.compute.provider_tree [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1178.181489] env[68285]: DEBUG nova.compute.utils [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1178.342095] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892180, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.467147] env[68285]: INFO nova.compute.manager [-] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Took 1.37 seconds to deallocate network for instance. [ 1178.567448] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892181, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.674668] env[68285]: DEBUG nova.scheduler.client.report [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 130 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1178.675868] env[68285]: DEBUG nova.compute.provider_tree [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 130 to 131 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1178.675868] env[68285]: DEBUG nova.compute.provider_tree [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1178.683350] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.724932] env[68285]: DEBUG nova.network.neutron [-] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.841788] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892180, 'name': RemoveSnapshot_Task, 'duration_secs': 0.612796} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.841851] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1178.847795] env[68285]: DEBUG nova.compute.manager [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1178.847795] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2737d2ee-f162-46d8-b089-4ae793f67e5a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.973975] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.066734] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892181, 'name': ReconfigVM_Task, 'duration_secs': 0.535708} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.067030] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d/08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1179.067653] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69217477-8c85-48b3-a82f-3ff3e3327a4d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.074217] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Waiting for the task: (returnval){ [ 1179.074217] env[68285]: value = "task-2892182" [ 1179.074217] env[68285]: _type = "Task" [ 1179.074217] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.081605] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892182, 'name': Rename_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.180986] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.483s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.181537] env[68285]: DEBUG nova.compute.manager [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1179.184455] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.209s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.187398] env[68285]: INFO nova.compute.claims [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1179.230829] env[68285]: INFO nova.compute.manager [-] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Took 1.61 seconds to deallocate network for instance. [ 1179.355681] env[68285]: INFO nova.compute.manager [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Shelve offloading [ 1179.514265] env[68285]: DEBUG nova.compute.manager [req-52e18e9c-8953-4369-87f7-b37de1eabcae req-36c12d62-9394-4b57-8206-96b374d9fddb service nova] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Received event network-vif-deleted-de764e97-9703-4359-9800-31118f814f1e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1179.584926] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892182, 'name': Rename_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.690659] env[68285]: DEBUG nova.compute.utils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1179.694043] env[68285]: DEBUG nova.compute.manager [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1179.694214] env[68285]: DEBUG nova.network.neutron [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1179.742215] env[68285]: DEBUG oslo_concurrency.lockutils [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.754174] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.754444] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.754687] env[68285]: INFO nova.compute.manager [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Attaching volume 3ec9591e-5da4-46d7-a97b-a79b2fb4dc42 to /dev/sdc [ 1179.768538] env[68285]: DEBUG nova.policy [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6b4e2d57fba54805a9ea3f4646525f2b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7fc2409b791d45d7804229456f013b84', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1179.789686] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845c872a-343f-44c3-8ff8-1805bb3032fa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.797203] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698aa68f-7b72-43df-9ec5-8be5a345608c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.815421] env[68285]: DEBUG nova.virt.block_device [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] 
[instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Updating existing volume attachment record: 79438749-f2e6-407c-bb4d-77dc94cf50a9 {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1179.858768] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1179.859172] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a369864-ac4b-4297-bd4e-594c5865f614 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.866335] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1179.866335] env[68285]: value = "task-2892183" [ 1179.866335] env[68285]: _type = "Task" [ 1179.866335] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.087141] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892182, 'name': Rename_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.195434] env[68285]: DEBUG nova.compute.manager [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1180.255870] env[68285]: DEBUG nova.network.neutron [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Successfully created port: 2dc847c5-91ee-448c-b47b-5c2a16f2cf1e {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1180.383169] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1180.383169] env[68285]: DEBUG nova.compute.manager [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1180.384258] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65766f2e-c1e8-49c9-977a-a5de4d228745 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.389980] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.390037] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquired lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.390650] env[68285]: DEBUG nova.network.neutron [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1180.518556] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77c64ac-683c-47a0-9ae2-4c227ff01dfa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.525821] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c17c763-3b08-49c5-bb84-5aec777e0b93 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.559892] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4da2b88-1fb7-4b5b-a4b0-1bc3acaaf62e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.567565] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d4c80870-9cf2-488d-af85-1b78c5fc8652 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.582758] env[68285]: DEBUG nova.compute.provider_tree [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1180.592552] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892182, 'name': Rename_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.086304] env[68285]: DEBUG nova.scheduler.client.report [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1181.095573] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892182, 'name': Rename_Task, 'duration_secs': 1.808425} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.096529] env[68285]: DEBUG nova.network.neutron [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Updating instance_info_cache with network_info: [{"id": "724df450-925b-47ae-884b-4935b5b95ab2", "address": "fa:16:3e:0f:59:8c", "network": {"id": "19fe9f45-cb71-4a4f-8a94-0020f8d0e8a7", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-693820438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fb202eb50a74c558edb6fdb9dfaf077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap724df450-92", "ovs_interfaceid": "724df450-925b-47ae-884b-4935b5b95ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.097665] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1181.098445] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cdeff9d6-28d8-48fd-aed6-c7d303e3212a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.107848] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Waiting for the task: (returnval){ [ 1181.107848] env[68285]: value = "task-2892185" [ 1181.107848] env[68285]: _type = "Task" [ 1181.107848] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.116177] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892185, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.209224] env[68285]: DEBUG nova.compute.manager [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1181.236345] env[68285]: DEBUG nova.virt.hardware [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1181.236593] env[68285]: DEBUG nova.virt.hardware [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1181.236778] env[68285]: DEBUG nova.virt.hardware [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1181.236988] env[68285]: DEBUG nova.virt.hardware [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1181.237154] env[68285]: DEBUG nova.virt.hardware [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1181.237305] env[68285]: DEBUG nova.virt.hardware [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1181.237512] env[68285]: DEBUG nova.virt.hardware [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1181.237668] env[68285]: DEBUG nova.virt.hardware [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1181.237834] env[68285]: DEBUG nova.virt.hardware [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1181.237994] env[68285]: DEBUG nova.virt.hardware [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1181.238184] env[68285]: DEBUG nova.virt.hardware [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1181.239053] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe737fa-fb0c-459f-a3cc-a1968dd0ebc2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.247587] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82a18e3-2b53-494d-90ef-5aa7acbb5480 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.593069] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.593069] env[68285]: DEBUG nova.compute.manager [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1181.595240] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.973s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.595457] env[68285]: DEBUG nova.objects.instance [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Lazy-loading 'resources' on Instance uuid 94652533-8c34-42fa-8d70-4effc307ec71 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1181.600916] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Releasing lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.618488] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892185, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.785792] env[68285]: DEBUG nova.compute.manager [req-a7207ddf-c763-4607-a0ff-7640df834805 req-be38af40-a3f0-4c4a-ae4c-159e5c491eca service nova] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Received event network-vif-plugged-2dc847c5-91ee-448c-b47b-5c2a16f2cf1e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1181.786061] env[68285]: DEBUG oslo_concurrency.lockutils [req-a7207ddf-c763-4607-a0ff-7640df834805 req-be38af40-a3f0-4c4a-ae4c-159e5c491eca service nova] Acquiring lock "9175fd25-a00c-4a2c-b779-56e6541dcaa1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.786382] env[68285]: DEBUG oslo_concurrency.lockutils [req-a7207ddf-c763-4607-a0ff-7640df834805 req-be38af40-a3f0-4c4a-ae4c-159e5c491eca service nova] Lock "9175fd25-a00c-4a2c-b779-56e6541dcaa1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.786513] env[68285]: DEBUG oslo_concurrency.lockutils [req-a7207ddf-c763-4607-a0ff-7640df834805 req-be38af40-a3f0-4c4a-ae4c-159e5c491eca service nova] Lock "9175fd25-a00c-4a2c-b779-56e6541dcaa1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.786708] env[68285]: DEBUG nova.compute.manager [req-a7207ddf-c763-4607-a0ff-7640df834805 req-be38af40-a3f0-4c4a-ae4c-159e5c491eca service nova] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] No waiting events found dispatching network-vif-plugged-2dc847c5-91ee-448c-b47b-5c2a16f2cf1e {{(pid=68285) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1181.786857] env[68285]: WARNING nova.compute.manager [req-a7207ddf-c763-4607-a0ff-7640df834805 req-be38af40-a3f0-4c4a-ae4c-159e5c491eca service nova] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Received unexpected event network-vif-plugged-2dc847c5-91ee-448c-b47b-5c2a16f2cf1e for instance with vm_state building and task_state spawning. [ 1181.848603] env[68285]: DEBUG nova.compute.manager [req-ab2a57de-64b6-41fa-983e-73a20b8fb4c3 req-4c10d283-eae4-4050-a478-1e5c6c31b351 service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Received event network-vif-unplugged-724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1181.848829] env[68285]: DEBUG oslo_concurrency.lockutils [req-ab2a57de-64b6-41fa-983e-73a20b8fb4c3 req-4c10d283-eae4-4050-a478-1e5c6c31b351 service nova] Acquiring lock "be47df2a-aee7-4275-9acb-9cf74367f503-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.849100] env[68285]: DEBUG oslo_concurrency.lockutils [req-ab2a57de-64b6-41fa-983e-73a20b8fb4c3 req-4c10d283-eae4-4050-a478-1e5c6c31b351 service nova] Lock "be47df2a-aee7-4275-9acb-9cf74367f503-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.849316] env[68285]: DEBUG oslo_concurrency.lockutils [req-ab2a57de-64b6-41fa-983e-73a20b8fb4c3 req-4c10d283-eae4-4050-a478-1e5c6c31b351 service nova] Lock "be47df2a-aee7-4275-9acb-9cf74367f503-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.849585] env[68285]: DEBUG nova.compute.manager [req-ab2a57de-64b6-41fa-983e-73a20b8fb4c3 req-4c10d283-eae4-4050-a478-1e5c6c31b351 service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] No waiting events found dispatching network-vif-unplugged-724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1181.849815] env[68285]: WARNING nova.compute.manager [req-ab2a57de-64b6-41fa-983e-73a20b8fb4c3 req-4c10d283-eae4-4050-a478-1e5c6c31b351 service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Received unexpected event network-vif-unplugged-724df450-925b-47ae-884b-4935b5b95ab2 for instance with vm_state shelved and task_state shelving_offloading. 
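The records from 1181.785792 through 1181.849815 show the compute manager's external-event plumbing: Neutron reports network-vif-plugged / network-vif-unplugged back to Nova, the manager takes the per-instance "<uuid>-events" lock, looks for a waiter registered for that exact event, and logs "No waiting events found dispatching ..." plus "Received unexpected event ..." when nothing is waiting (here because one instance is still building and the other is already shelving). The following is a minimal, self-contained sketch of that prepare/wait/dispatch pattern using only the standard library; the class and method names are hypothetical illustrations, not Nova's actual API.

import threading
from collections import defaultdict

class InstanceEvents:
    """Toy prepare/wait/pop pattern for externally delivered instance events."""

    def __init__(self):
        self._lock = threading.Lock()        # stands in for the "<uuid>-events" lock
        self._waiters = defaultdict(dict)    # instance uuid -> {event name: Event}

    def prepare(self, instance_uuid, event_name):
        # Register a waiter before starting the operation that triggers the event
        # (e.g. before asking Neutron to plug a VIF).
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        # Called when the external notification arrives from the network service.
        with self._lock:
            ev = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if ev is None:
            # Analogous to the "Received unexpected event ..." WARNING above:
            # nothing was waiting for this event on this instance.
            print(f"unexpected event {event_name} for {instance_uuid}")
            return False
        ev.set()
        return True

# Usage sketch: the spawn path prepares the waiter, the event handler dispatches it.
events = InstanceEvents()
waiter = events.prepare("9175fd25-a00c-4a2c-b779-56e6541dcaa1",
                        "network-vif-plugged-2dc847c5")
events.dispatch("9175fd25-a00c-4a2c-b779-56e6541dcaa1",
                "network-vif-plugged-2dc847c5")   # sets the waiter
waiter.wait(timeout=300)                          # spawn would block here until plugged

The point of the pattern is ordering: a waiter must exist before the notification can arrive, otherwise the event is dropped with a warning rather than blocking the sender, which is exactly what the two WARNING records above show.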
[ 1181.867787] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1181.868744] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53048a53-940d-45a1-8ea9-d9af1806864a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.872361] env[68285]: DEBUG nova.network.neutron [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Successfully updated port: 2dc847c5-91ee-448c-b47b-5c2a16f2cf1e {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1181.878543] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1181.878831] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75b62d96-255d-4816-9a06-4f3f046ca89b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.951473] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1181.952093] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1181.952093] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Deleting the datastore file [datastore1] be47df2a-aee7-4275-9acb-9cf74367f503 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1181.952184] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5995f1a6-b837-4e23-8271-a99dd6cd749b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.959370] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1181.959370] env[68285]: value = "task-2892187" [ 1181.959370] env[68285]: _type = "Task" [ 1181.959370] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.967009] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892187, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.100038] env[68285]: DEBUG nova.compute.utils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1182.105268] env[68285]: DEBUG nova.compute.manager [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1182.105523] env[68285]: DEBUG nova.network.neutron [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1182.119743] env[68285]: DEBUG oslo_vmware.api [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892185, 'name': PowerOnVM_Task, 'duration_secs': 0.708977} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.119743] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1182.120024] env[68285]: INFO nova.compute.manager [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Took 11.72 seconds to spawn the instance on the hypervisor. 
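The DeleteDatastoreFile_Task and PowerOnVM_Task entries above follow the usual wait-for-task flow: submit the vCenter task, then poll its state and log "progress is N%" until it completes or fails. The following is a minimal, generic poll loop written to illustrate that flow; get_task_info and the returned dict shape are assumptions for the sketch, not the oslo.vmware API.

```python
# Illustrative sketch only: a generic poll-until-done loop in the spirit of the
# wait_for_task / _poll_task entries above. Not the oslo.vmware implementation.
import time


class TaskFailed(Exception):
    pass


def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
    """Poll task state until it succeeds, fails, or times out.

    get_task_info(task_id) is a hypothetical callable assumed to return a dict
    such as {'state': 'running', 'progress': 42}.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info(task_id)
        state = info.get("state")
        if state == "success":
            return info
        if state == "error":
            raise TaskFailed(f"Task {task_id} failed: {info.get('error')}")
        if time.monotonic() > deadline:
            raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
        # Comparable to the periodic "progress is N%" debug lines in the log.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Fake backend that completes after three polls, just to exercise the loop.
    calls = {"n": 0}

    def fake_get_task_info(task_id):
        calls["n"] += 1
        if calls["n"] < 3:
            return {"state": "running", "progress": calls["n"] * 40}
        return {"state": "success", "progress": 100}

    wait_for_task(fake_get_task_info, "task-2892187", poll_interval=0.01)
```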
[ 1182.120313] env[68285]: DEBUG nova.compute.manager [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1182.121212] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13727bb-651e-43aa-ad33-393014178bdf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.178163] env[68285]: DEBUG nova.policy [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '184360cab7224b9eaef80dfe89d0208b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '288595d9298e43fa859bc6b68054aa08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1182.374702] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Acquiring lock "refresh_cache-9175fd25-a00c-4a2c-b779-56e6541dcaa1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.375070] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Acquired lock "refresh_cache-9175fd25-a00c-4a2c-b779-56e6541dcaa1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1182.375152] env[68285]: DEBUG nova.network.neutron [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1182.411042] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfda8f7-daf9-4576-96f4-cac86a32390a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.419180] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d5960f-724e-48c4-b5d9-ea2c5d7f4669 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.450638] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae520528-4367-4796-8b35-a95dcc86eb05 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.460184] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0981f8-c26a-4dcc-94ff-55d147abaaee {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.474753] env[68285]: DEBUG oslo_vmware.api [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892187, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.333931} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.483065] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1182.483296] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1182.487148] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1182.487148] env[68285]: DEBUG nova.compute.provider_tree [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1182.507967] env[68285]: INFO nova.scheduler.client.report [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Deleted allocations for instance be47df2a-aee7-4275-9acb-9cf74367f503 [ 1182.529102] env[68285]: DEBUG nova.network.neutron [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Successfully created port: 20c0e4b6-0469-4732-b880-d852321ef348 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1182.609430] env[68285]: DEBUG nova.compute.manager [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1182.644358] env[68285]: INFO nova.compute.manager [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Took 34.34 seconds to build instance. [ 1182.913718] env[68285]: DEBUG nova.network.neutron [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1182.989699] env[68285]: DEBUG nova.scheduler.client.report [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1183.012514] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.063925] env[68285]: DEBUG nova.network.neutron [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Updating instance_info_cache with network_info: [{"id": "2dc847c5-91ee-448c-b47b-5c2a16f2cf1e", "address": "fa:16:3e:63:b7:89", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dc847c5-91", "ovs_interfaceid": "2dc847c5-91ee-448c-b47b-5c2a16f2cf1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.147293] env[68285]: DEBUG oslo_concurrency.lockutils [None req-025c3b5a-58f7-4e4a-a3f7-0f7e228f2f4c tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lock "08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.847s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.325967] env[68285]: DEBUG oslo_concurrency.lockutils [None req-af180208-a5df-47f2-9f61-13fc36a11ff8 tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Acquiring lock 
"interface-08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.325967] env[68285]: DEBUG oslo_concurrency.lockutils [None req-af180208-a5df-47f2-9f61-13fc36a11ff8 tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lock "interface-08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.326330] env[68285]: DEBUG nova.objects.instance [None req-af180208-a5df-47f2-9f61-13fc36a11ff8 tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lazy-loading 'flavor' on Instance uuid 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1183.495141] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.899s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.499107] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.425s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.500483] env[68285]: INFO nova.compute.claims [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1183.533216] env[68285]: INFO nova.scheduler.client.report [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Deleted allocations for instance 94652533-8c34-42fa-8d70-4effc307ec71 [ 1183.567672] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Releasing lock "refresh_cache-9175fd25-a00c-4a2c-b779-56e6541dcaa1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.568099] env[68285]: DEBUG nova.compute.manager [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Instance network_info: |[{"id": "2dc847c5-91ee-448c-b47b-5c2a16f2cf1e", "address": "fa:16:3e:63:b7:89", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.233.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dc847c5-91", "ovs_interfaceid": "2dc847c5-91ee-448c-b47b-5c2a16f2cf1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1183.568540] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:b7:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2dc847c5-91ee-448c-b47b-5c2a16f2cf1e', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1183.576034] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Creating folder: Project (7fc2409b791d45d7804229456f013b84). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1183.576641] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e6321f0-da31-4dcf-bdba-e091735dcfbd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.588967] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Created folder: Project (7fc2409b791d45d7804229456f013b84) in parent group-v580775. [ 1183.589260] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Creating folder: Instances. Parent ref: group-v581031. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1183.589573] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac6889a1-5ad2-4c4b-90f1-43cbe2d6a98b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.600778] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Created folder: Instances in parent group-v581031. 
[ 1183.601060] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1183.601272] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1183.601491] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66de6215-311a-4fc1-a24c-062721c6af5d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.619991] env[68285]: DEBUG nova.compute.manager [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1183.625100] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1183.625100] env[68285]: value = "task-2892191" [ 1183.625100] env[68285]: _type = "Task" [ 1183.625100] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.636176] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892191, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.646593] env[68285]: DEBUG nova.virt.hardware [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1183.646856] env[68285]: DEBUG nova.virt.hardware [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1183.647493] env[68285]: DEBUG nova.virt.hardware [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1183.647493] env[68285]: DEBUG nova.virt.hardware [None req-49e6d465-1a62-4357-870c-9dd1312bc094 
tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1183.647493] env[68285]: DEBUG nova.virt.hardware [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1183.647493] env[68285]: DEBUG nova.virt.hardware [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1183.647705] env[68285]: DEBUG nova.virt.hardware [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1183.647863] env[68285]: DEBUG nova.virt.hardware [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1183.648040] env[68285]: DEBUG nova.virt.hardware [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1183.648215] env[68285]: DEBUG nova.virt.hardware [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1183.648447] env[68285]: DEBUG nova.virt.hardware [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1183.651327] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842eeccd-dc1f-4cc9-9390-9633b9c4ddcb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.662980] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0836c833-97d0-431c-9949-e2721ebce935 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.830893] env[68285]: DEBUG nova.objects.instance [None req-af180208-a5df-47f2-9f61-13fc36a11ff8 tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lazy-loading 'pci_requests' on Instance uuid 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1184.005920] env[68285]: DEBUG nova.compute.manager [req-2546ceb3-3ab1-4a0d-aa5a-8954c04e9219 req-4edaf85d-b98d-454b-8abf-5204dd7ecb69 
service nova] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Received event network-changed-2dc847c5-91ee-448c-b47b-5c2a16f2cf1e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1184.006097] env[68285]: DEBUG nova.compute.manager [req-2546ceb3-3ab1-4a0d-aa5a-8954c04e9219 req-4edaf85d-b98d-454b-8abf-5204dd7ecb69 service nova] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Refreshing instance network info cache due to event network-changed-2dc847c5-91ee-448c-b47b-5c2a16f2cf1e. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1184.006586] env[68285]: DEBUG oslo_concurrency.lockutils [req-2546ceb3-3ab1-4a0d-aa5a-8954c04e9219 req-4edaf85d-b98d-454b-8abf-5204dd7ecb69 service nova] Acquiring lock "refresh_cache-9175fd25-a00c-4a2c-b779-56e6541dcaa1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.006783] env[68285]: DEBUG oslo_concurrency.lockutils [req-2546ceb3-3ab1-4a0d-aa5a-8954c04e9219 req-4edaf85d-b98d-454b-8abf-5204dd7ecb69 service nova] Acquired lock "refresh_cache-9175fd25-a00c-4a2c-b779-56e6541dcaa1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.006968] env[68285]: DEBUG nova.network.neutron [req-2546ceb3-3ab1-4a0d-aa5a-8954c04e9219 req-4edaf85d-b98d-454b-8abf-5204dd7ecb69 service nova] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Refreshing network info cache for port 2dc847c5-91ee-448c-b47b-5c2a16f2cf1e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1184.042632] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26c5c451-89c1-413d-8af6-a89ca855bcb4 tempest-ServersTestBootFromVolume-279649200 tempest-ServersTestBootFromVolume-279649200-project-member] Lock "94652533-8c34-42fa-8d70-4effc307ec71" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.901s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.051547] env[68285]: DEBUG nova.compute.manager [req-40412663-6381-4d15-bc24-3f2b60117cbf req-0da9832f-ac44-4396-a1fa-400341b81ddb service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Received event network-changed-724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1184.051547] env[68285]: DEBUG nova.compute.manager [req-40412663-6381-4d15-bc24-3f2b60117cbf req-0da9832f-ac44-4396-a1fa-400341b81ddb service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Refreshing instance network info cache due to event network-changed-724df450-925b-47ae-884b-4935b5b95ab2. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1184.051811] env[68285]: DEBUG oslo_concurrency.lockutils [req-40412663-6381-4d15-bc24-3f2b60117cbf req-0da9832f-ac44-4396-a1fa-400341b81ddb service nova] Acquiring lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.052155] env[68285]: DEBUG oslo_concurrency.lockutils [req-40412663-6381-4d15-bc24-3f2b60117cbf req-0da9832f-ac44-4396-a1fa-400341b81ddb service nova] Acquired lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.052155] env[68285]: DEBUG nova.network.neutron [req-40412663-6381-4d15-bc24-3f2b60117cbf req-0da9832f-ac44-4396-a1fa-400341b81ddb service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Refreshing network info cache for port 724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1184.135661] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892191, 'name': CreateVM_Task, 'duration_secs': 0.42334} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.135864] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1184.136551] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.136717] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.137042] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1184.137298] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cb3c025-c14a-472f-9e1a-e24f091a76f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.141816] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Waiting for the task: (returnval){ [ 1184.141816] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5296f0bd-5ca9-9592-edae-67f426813c7e" [ 1184.141816] env[68285]: _type = "Task" [ 1184.141816] env[68285]: 
} to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.150088] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5296f0bd-5ca9-9592-edae-67f426813c7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.334455] env[68285]: DEBUG nova.objects.base [None req-af180208-a5df-47f2-9f61-13fc36a11ff8 tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Object Instance<08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d> lazy-loaded attributes: flavor,pci_requests {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1184.335014] env[68285]: DEBUG nova.network.neutron [None req-af180208-a5df-47f2-9f61-13fc36a11ff8 tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1184.356779] env[68285]: DEBUG nova.network.neutron [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Successfully updated port: 20c0e4b6-0469-4732-b880-d852321ef348 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1184.369628] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Volume attach. 
Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1184.369628] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581030', 'volume_id': '3ec9591e-5da4-46d7-a97b-a79b2fb4dc42', 'name': 'volume-3ec9591e-5da4-46d7-a97b-a79b2fb4dc42', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7790f1e6-c73f-40d6-97af-00e9c518a09c', 'attached_at': '', 'detached_at': '', 'volume_id': '3ec9591e-5da4-46d7-a97b-a79b2fb4dc42', 'serial': '3ec9591e-5da4-46d7-a97b-a79b2fb4dc42'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1184.370819] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d422e1a-555b-4a7e-bd90-2bb0555e44e9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.398086] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2071880d-0c2d-408f-a4f1-2384aaf41019 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.428620] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] volume-3ec9591e-5da4-46d7-a97b-a79b2fb4dc42/volume-3ec9591e-5da4-46d7-a97b-a79b2fb4dc42.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1184.428953] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc9e4405-fe89-4e7f-8eee-ec4520e99ebc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.452476] env[68285]: DEBUG oslo_vmware.api [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1184.452476] env[68285]: value = "task-2892192" [ 1184.452476] env[68285]: _type = "Task" [ 1184.452476] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.462201] env[68285]: DEBUG oslo_vmware.api [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892192, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.466010] env[68285]: DEBUG oslo_concurrency.lockutils [None req-af180208-a5df-47f2-9f61-13fc36a11ff8 tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lock "interface-08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.140s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.652012] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5296f0bd-5ca9-9592-edae-67f426813c7e, 'name': SearchDatastore_Task, 'duration_secs': 0.011502} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.657786] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1184.658370] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1184.658620] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.658768] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.659156] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1184.659636] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bfba27a-55d7-4378-b607-c41b72e0f8f9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.673466] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 
tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1184.673466] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1184.674202] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca8664f3-c43b-43e0-a6b4-d6fd52c320a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.679389] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Waiting for the task: (returnval){ [ 1184.679389] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ed4c7f-7752-5be0-f156-c393b0e5d723" [ 1184.679389] env[68285]: _type = "Task" [ 1184.679389] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.692762] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ed4c7f-7752-5be0-f156-c393b0e5d723, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.743450] env[68285]: DEBUG nova.network.neutron [req-2546ceb3-3ab1-4a0d-aa5a-8954c04e9219 req-4edaf85d-b98d-454b-8abf-5204dd7ecb69 service nova] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Updated VIF entry in instance network info cache for port 2dc847c5-91ee-448c-b47b-5c2a16f2cf1e. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1184.743819] env[68285]: DEBUG nova.network.neutron [req-2546ceb3-3ab1-4a0d-aa5a-8954c04e9219 req-4edaf85d-b98d-454b-8abf-5204dd7ecb69 service nova] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Updating instance_info_cache with network_info: [{"id": "2dc847c5-91ee-448c-b47b-5c2a16f2cf1e", "address": "fa:16:3e:63:b7:89", "network": {"id": "2636fe98-78a6-44ec-ae1c-180ee3c05983", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "7499298836c74d9cb7f25c3b3f185ac3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dc847c5-91", "ovs_interfaceid": "2dc847c5-91ee-448c-b47b-5c2a16f2cf1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.830529] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4d9c38-2835-4792-b5e1-6fa5bf595fd7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.842723] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90674079-51c5-4b90-859a-e2710ae1835c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.877158] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "refresh_cache-2eec5d74-b1b8-4714-aaf1-687ec56ad860" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.877326] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "refresh_cache-2eec5d74-b1b8-4714-aaf1-687ec56ad860" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.877449] env[68285]: DEBUG nova.network.neutron [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1184.880127] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3956124f-cd8d-4552-8b3d-a9c463b75f9f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.889954] env[68285]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e9d3ef-25d9-40c7-8fe6-e57ae1185961 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.905672] env[68285]: DEBUG nova.compute.provider_tree [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1184.925864] env[68285]: DEBUG nova.network.neutron [req-40412663-6381-4d15-bc24-3f2b60117cbf req-0da9832f-ac44-4396-a1fa-400341b81ddb service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Updated VIF entry in instance network info cache for port 724df450-925b-47ae-884b-4935b5b95ab2. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1184.926295] env[68285]: DEBUG nova.network.neutron [req-40412663-6381-4d15-bc24-3f2b60117cbf req-0da9832f-ac44-4396-a1fa-400341b81ddb service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Updating instance_info_cache with network_info: [{"id": "724df450-925b-47ae-884b-4935b5b95ab2", "address": "fa:16:3e:0f:59:8c", "network": {"id": "19fe9f45-cb71-4a4f-8a94-0020f8d0e8a7", "bridge": null, "label": "tempest-ServersNegativeTestJSON-693820438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fb202eb50a74c558edb6fdb9dfaf077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap724df450-92", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.948956] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "be47df2a-aee7-4275-9acb-9cf74367f503" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1184.965034] env[68285]: DEBUG oslo_vmware.api [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892192, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.190236] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ed4c7f-7752-5be0-f156-c393b0e5d723, 'name': SearchDatastore_Task, 'duration_secs': 0.015304} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.191145] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d601d77f-afb4-460b-b3ac-69a4e9839422 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.197311] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Waiting for the task: (returnval){ [ 1185.197311] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5209a57c-325b-79a0-615c-714cd354293e" [ 1185.197311] env[68285]: _type = "Task" [ 1185.197311] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.205155] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5209a57c-325b-79a0-615c-714cd354293e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.249906] env[68285]: DEBUG oslo_concurrency.lockutils [req-2546ceb3-3ab1-4a0d-aa5a-8954c04e9219 req-4edaf85d-b98d-454b-8abf-5204dd7ecb69 service nova] Releasing lock "refresh_cache-9175fd25-a00c-4a2c-b779-56e6541dcaa1" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.409970] env[68285]: DEBUG nova.scheduler.client.report [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1185.414736] env[68285]: DEBUG nova.network.neutron [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1185.429946] env[68285]: DEBUG oslo_concurrency.lockutils [req-40412663-6381-4d15-bc24-3f2b60117cbf req-0da9832f-ac44-4396-a1fa-400341b81ddb service nova] Releasing lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.465197] env[68285]: DEBUG oslo_vmware.api [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892192, 'name': ReconfigVM_Task, 'duration_secs': 0.644854} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.467666] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Reconfigured VM instance instance-00000052 to attach disk [datastore2] volume-3ec9591e-5da4-46d7-a97b-a79b2fb4dc42/volume-3ec9591e-5da4-46d7-a97b-a79b2fb4dc42.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1185.475413] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f160633-c22e-4719-ab5a-4e50d43ea527 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.493991] env[68285]: DEBUG oslo_vmware.api [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1185.493991] env[68285]: value = "task-2892193" [ 1185.493991] env[68285]: _type = "Task" [ 1185.493991] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.503385] env[68285]: DEBUG oslo_vmware.api [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892193, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.629484] env[68285]: DEBUG nova.network.neutron [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Updating instance_info_cache with network_info: [{"id": "20c0e4b6-0469-4732-b880-d852321ef348", "address": "fa:16:3e:78:f7:64", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20c0e4b6-04", "ovs_interfaceid": "20c0e4b6-0469-4732-b880-d852321ef348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.709729] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': 
session[52410409-0226-2549-941e-c989b8ec60bd]5209a57c-325b-79a0-615c-714cd354293e, 'name': SearchDatastore_Task, 'duration_secs': 0.010392} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.709729] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.709729] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 9175fd25-a00c-4a2c-b779-56e6541dcaa1/9175fd25-a00c-4a2c-b779-56e6541dcaa1.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1185.709729] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af463534-1ef4-44c1-8576-89a5a99b221e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.715231] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Waiting for the task: (returnval){ [ 1185.715231] env[68285]: value = "task-2892194" [ 1185.715231] env[68285]: _type = "Task" [ 1185.715231] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.723097] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': task-2892194, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.854321] env[68285]: DEBUG oslo_vmware.rw_handles [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a78928-2ee2-6f94-2ac7-ae660d24eaf2/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1185.855346] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c90e4f-9bca-466b-8092-43cf099dce48 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.863708] env[68285]: DEBUG oslo_vmware.rw_handles [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a78928-2ee2-6f94-2ac7-ae660d24eaf2/disk-0.vmdk is in state: ready. 
{{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1185.864287] env[68285]: ERROR oslo_vmware.rw_handles [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a78928-2ee2-6f94-2ac7-ae660d24eaf2/disk-0.vmdk due to incomplete transfer. [ 1185.864558] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6e685942-65d2-40e6-8690-79879bc39e82 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.872754] env[68285]: DEBUG oslo_vmware.rw_handles [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a78928-2ee2-6f94-2ac7-ae660d24eaf2/disk-0.vmdk. {{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1185.872969] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Uploaded image 6a6aeaf1-3b71-4ca7-af37-384a8a742447 to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1185.875300] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1185.875625] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0b1bdba2-985c-440a-b63d-2ba62ef623fa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.883643] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1185.883643] env[68285]: value = "task-2892195" [ 1185.883643] env[68285]: _type = "Task" [ 1185.883643] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.898072] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892195, 'name': Destroy_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.914438] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.416s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.915017] env[68285]: DEBUG nova.compute.manager [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1185.917832] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.095s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.005497] env[68285]: DEBUG oslo_vmware.api [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892193, 'name': ReconfigVM_Task, 'duration_secs': 0.215682} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.006829] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581030', 'volume_id': '3ec9591e-5da4-46d7-a97b-a79b2fb4dc42', 'name': 'volume-3ec9591e-5da4-46d7-a97b-a79b2fb4dc42', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7790f1e6-c73f-40d6-97af-00e9c518a09c', 'attached_at': '', 'detached_at': '', 'volume_id': '3ec9591e-5da4-46d7-a97b-a79b2fb4dc42', 'serial': '3ec9591e-5da4-46d7-a97b-a79b2fb4dc42'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1186.105692] env[68285]: DEBUG nova.compute.manager [req-2f65308c-a416-49dc-bdd7-7df64abb39c7 req-eced9860-9770-4d1e-b4fa-56525b884dbf service nova] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Received event network-vif-plugged-20c0e4b6-0469-4732-b880-d852321ef348 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1186.105805] env[68285]: DEBUG oslo_concurrency.lockutils [req-2f65308c-a416-49dc-bdd7-7df64abb39c7 req-eced9860-9770-4d1e-b4fa-56525b884dbf service nova] Acquiring lock "2eec5d74-b1b8-4714-aaf1-687ec56ad860-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.105971] env[68285]: DEBUG oslo_concurrency.lockutils [req-2f65308c-a416-49dc-bdd7-7df64abb39c7 req-eced9860-9770-4d1e-b4fa-56525b884dbf service nova] Lock "2eec5d74-b1b8-4714-aaf1-687ec56ad860-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.106219] env[68285]: DEBUG oslo_concurrency.lockutils [req-2f65308c-a416-49dc-bdd7-7df64abb39c7 req-eced9860-9770-4d1e-b4fa-56525b884dbf service nova] Lock "2eec5d74-b1b8-4714-aaf1-687ec56ad860-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.106399] env[68285]: DEBUG nova.compute.manager [req-2f65308c-a416-49dc-bdd7-7df64abb39c7 req-eced9860-9770-4d1e-b4fa-56525b884dbf service nova] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] No waiting events found dispatching network-vif-plugged-20c0e4b6-0469-4732-b880-d852321ef348 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1186.106564] env[68285]: WARNING nova.compute.manager [req-2f65308c-a416-49dc-bdd7-7df64abb39c7 req-eced9860-9770-4d1e-b4fa-56525b884dbf service nova] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Received unexpected event network-vif-plugged-20c0e4b6-0469-4732-b880-d852321ef348 for instance with vm_state building and task_state spawning. [ 1186.106720] env[68285]: DEBUG nova.compute.manager [req-2f65308c-a416-49dc-bdd7-7df64abb39c7 req-eced9860-9770-4d1e-b4fa-56525b884dbf service nova] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Received event network-changed-20c0e4b6-0469-4732-b880-d852321ef348 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1186.106892] env[68285]: DEBUG nova.compute.manager [req-2f65308c-a416-49dc-bdd7-7df64abb39c7 req-eced9860-9770-4d1e-b4fa-56525b884dbf service nova] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Refreshing instance network info cache due to event network-changed-20c0e4b6-0469-4732-b880-d852321ef348. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1186.107129] env[68285]: DEBUG oslo_concurrency.lockutils [req-2f65308c-a416-49dc-bdd7-7df64abb39c7 req-eced9860-9770-4d1e-b4fa-56525b884dbf service nova] Acquiring lock "refresh_cache-2eec5d74-b1b8-4714-aaf1-687ec56ad860" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.132699] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "refresh_cache-2eec5d74-b1b8-4714-aaf1-687ec56ad860" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.133104] env[68285]: DEBUG nova.compute.manager [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Instance network_info: |[{"id": "20c0e4b6-0469-4732-b880-d852321ef348", "address": "fa:16:3e:78:f7:64", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20c0e4b6-04", "ovs_interfaceid": "20c0e4b6-0469-4732-b880-d852321ef348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1186.133436] env[68285]: DEBUG oslo_concurrency.lockutils [req-2f65308c-a416-49dc-bdd7-7df64abb39c7 req-eced9860-9770-4d1e-b4fa-56525b884dbf service nova] Acquired lock "refresh_cache-2eec5d74-b1b8-4714-aaf1-687ec56ad860" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.133629] env[68285]: DEBUG nova.network.neutron [req-2f65308c-a416-49dc-bdd7-7df64abb39c7 req-eced9860-9770-4d1e-b4fa-56525b884dbf service nova] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Refreshing network info cache for port 20c0e4b6-0469-4732-b880-d852321ef348 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1186.134984] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:f7:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '20c0e4b6-0469-4732-b880-d852321ef348', 'vif_model': 'vmxnet3'}] {{(pid=68285) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1186.148581] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1186.150803] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1186.150803] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-612c2abb-37f8-4edb-9b09-ff2f7ca5adc6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.172061] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1186.172061] env[68285]: value = "task-2892196" [ 1186.172061] env[68285]: _type = "Task" [ 1186.172061] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.182966] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892196, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.225608] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': task-2892194, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472822} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.225879] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 9175fd25-a00c-4a2c-b779-56e6541dcaa1/9175fd25-a00c-4a2c-b779-56e6541dcaa1.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1186.226518] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1186.226807] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78509019-88b3-4ab5-94c9-c214ae87cbd9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.236051] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Waiting for the task: (returnval){ [ 1186.236051] env[68285]: value = "task-2892197" [ 1186.236051] env[68285]: _type = "Task" [ 1186.236051] env[68285]: } to complete. 
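The "Waiting for the task ... progress is N% ... completed successfully" records that recur throughout this log are oslo.vmware's task-polling loop (wait_for_task in oslo_vmware/api.py). A minimal standalone sketch of the same pattern; the vCenter endpoint and credentials are placeholders, the constructor keyword names are assumptions rather than values from this log, and PowerOffVM_Task stands in for any task-returning vSphere call:

    from oslo_vmware import api as vmware_api

    # Placeholder endpoint and credentials -- not the vCenter from this log.
    session = vmware_api.VMwareAPISession(
        host='vc.example.test',
        server_username='user',
        server_password='secret',
        api_retry_count=2,
        task_poll_interval=0.5,
    )

    def power_off_and_wait(session, vm_ref):
        # invoke_api() issues the SOAP call; PowerOffVM_Task returns a Task
        # managed-object reference (vm_ref is a VirtualMachine reference
        # obtained elsewhere).
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls that Task -- the "progress is N%" lines -- and
        # returns its TaskInfo once it succeeds, raising if the task errors out.
        return session.wait_for_task(task)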
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.284064] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': task-2892197, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.399975] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892195, 'name': Destroy_Task} progress is 33%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.419911] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Acquiring lock "08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.421030] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lock "08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.421030] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Acquiring lock "08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.421030] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lock "08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.421030] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lock "08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.423600] env[68285]: DEBUG nova.compute.utils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1186.433094] env[68285]: INFO nova.compute.manager [None 
req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Terminating instance [ 1186.434350] env[68285]: DEBUG nova.compute.manager [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1186.434531] env[68285]: DEBUG nova.network.neutron [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1186.485123] env[68285]: DEBUG nova.policy [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '557a46b01bbf41e4a343d20c8206aa96', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9352aafac6e049feb8d74a91d1600224', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1186.688946] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892196, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.746408] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': task-2892197, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076314} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.748388] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1186.748388] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c9fe8d-1fe6-4a5c-be26-5e078e25dfbf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.774162] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 9175fd25-a00c-4a2c-b779-56e6541dcaa1/9175fd25-a00c-4a2c-b779-56e6541dcaa1.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1186.777175] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7961831b-802f-4c90-bd00-b37e0e509d5a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.798424] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Waiting for the task: (returnval){ [ 1186.798424] env[68285]: value = "task-2892198" [ 1186.798424] env[68285]: _type = "Task" [ 1186.798424] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.802153] env[68285]: DEBUG nova.network.neutron [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Successfully created port: 593ddaf0-2e7b-40c6-9df3-82aabf50fefa {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1186.808800] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': task-2892198, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.897929] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892195, 'name': Destroy_Task, 'duration_secs': 0.758635} completed successfully. 
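Disk paths in these records use the "[datastore] relative/path" form, e.g. "[datastore2] 9175fd25-a00c-4a2c-b779-56e6541dcaa1/9175fd25-a00c-4a2c-b779-56e6541dcaa1.vmdk" above. A trivial standalone helper for splitting and rebuilding that form (a simplified stand-in; the VMware driver has its own datastore-path helpers):

    def split_datastore_path(ds_path):
        # '[datastore2] dir/file.vmdk' -> ('datastore2', 'dir/file.vmdk')
        ds, _, rel = ds_path.partition('] ')
        return ds.lstrip('['), rel

    def build_datastore_path(ds, rel):
        return '[%s] %s' % (ds, rel)

    print(split_datastore_path(
        '[datastore2] 9175fd25-a00c-4a2c-b779-56e6541dcaa1/'
        '9175fd25-a00c-4a2c-b779-56e6541dcaa1.vmdk'))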
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.898648] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Destroyed the VM [ 1186.898947] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1186.899545] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6a9e6bcf-3d24-4b28-9e79-5af926acdb31 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.906098] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1186.906098] env[68285]: value = "task-2892199" [ 1186.906098] env[68285]: _type = "Task" [ 1186.906098] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.914977] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892199, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.929550] env[68285]: DEBUG nova.network.neutron [req-2f65308c-a416-49dc-bdd7-7df64abb39c7 req-eced9860-9770-4d1e-b4fa-56525b884dbf service nova] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Updated VIF entry in instance network info cache for port 20c0e4b6-0469-4732-b880-d852321ef348. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1186.929906] env[68285]: DEBUG nova.network.neutron [req-2f65308c-a416-49dc-bdd7-7df64abb39c7 req-eced9860-9770-4d1e-b4fa-56525b884dbf service nova] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Updating instance_info_cache with network_info: [{"id": "20c0e4b6-0469-4732-b880-d852321ef348", "address": "fa:16:3e:78:f7:64", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20c0e4b6-04", "ovs_interfaceid": "20c0e4b6-0469-4732-b880-d852321ef348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.933191] env[68285]: DEBUG nova.compute.manager [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1186.940244] env[68285]: INFO nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating resource usage from migration 022b54e2-d1a4-4903-8d15-41f4cac86f16 [ 1186.943740] env[68285]: DEBUG nova.compute.manager [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Start destroying the instance on the hypervisor. 
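The instance_info_cache entry updated above for port 20c0e4b6-0469-4732-b880-d852321ef348 carries the MAC, subnet and fixed IP the driver needs to build the VIF. A short standalone sketch of pulling those fields out of one such entry; the dict below is copied from the record above and heavily trimmed:

    # One network_info entry, trimmed to the fields used below.
    vif = {
        "id": "20c0e4b6-0469-4732-b880-d852321ef348",
        "address": "fa:16:3e:78:f7:64",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.3", "type": "fixed"}],
            }],
        },
        "type": "ovs",
        "devname": "tap20c0e4b6-04",
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["address"], fixed_ips)   # fa:16:3e:78:f7:64 ['192.168.128.3']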
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1186.943965] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1186.945712] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7714f9-c17b-4ce3-bb43-4056f0337953 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.958628] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.959307] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c026126c-59c0-4fe9-8b08-27d69835db2e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.965295] env[68285]: DEBUG oslo_vmware.api [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Waiting for the task: (returnval){ [ 1186.965295] env[68285]: value = "task-2892200" [ 1186.965295] env[68285]: _type = "Task" [ 1186.965295] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.970622] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d0f6ab86-e18d-42ac-bcf3-94eafb1939ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.970862] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d0b04097-292a-47e7-8f14-199b1650dc2c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1186.971014] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 9c190abd-23ee-4e8e-8b91-9050847581d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.971133] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 3858399e-9fc4-4d60-a9d5-95caefb7bd87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.971246] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 7790f1e6-c73f-40d6-97af-00e9c518a09c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.971402] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d1446290-95ce-4e87-85df-7cc69bb57ce7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.971581] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 589d1560-9269-4de2-bd79-454ebdaa40d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.971728] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.971861] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 5abddda1-9bf7-4039-81c7-8622f43cc72e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.972013] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 75b9c202-b50d-4c59-b3ef-03e61225a1dc is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1186.972164] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.972315] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 9175fd25-a00c-4a2c-b779-56e6541dcaa1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.972493] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 2eec5d74-b1b8-4714-aaf1-687ec56ad860 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.972675] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance fe6c495f-6917-4e3d-acce-7487a45e3ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1186.982622] env[68285]: DEBUG oslo_vmware.api [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892200, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.048149] env[68285]: DEBUG nova.objects.instance [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lazy-loading 'flavor' on Instance uuid 7790f1e6-c73f-40d6-97af-00e9c518a09c {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.185061] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892196, 'name': CreateVM_Task, 'duration_secs': 0.665916} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.185061] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1187.185443] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.185443] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1187.185764] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1187.186082] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec0cc5c9-d840-4bfb-8cd3-ea69e36f4141 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.190978] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1187.190978] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]522cf088-ef29-f402-02d0-e904f6eed07b" [ 1187.190978] env[68285]: _type = "Task" [ 1187.190978] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.198905] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522cf088-ef29-f402-02d0-e904f6eed07b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.311395] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': task-2892198, 'name': ReconfigVM_Task, 'duration_secs': 0.284808} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.311717] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 9175fd25-a00c-4a2c-b779-56e6541dcaa1/9175fd25-a00c-4a2c-b779-56e6541dcaa1.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1187.312388] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47dd405c-90a4-46c2-875f-7830b4857aa9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.319412] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Waiting for the task: (returnval){ [ 1187.319412] env[68285]: value = "task-2892201" [ 1187.319412] env[68285]: _type = "Task" [ 1187.319412] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.334260] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': task-2892201, 'name': Rename_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.419491] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892199, 'name': RemoveSnapshot_Task} progress is 29%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.432562] env[68285]: DEBUG oslo_concurrency.lockutils [req-2f65308c-a416-49dc-bdd7-7df64abb39c7 req-eced9860-9770-4d1e-b4fa-56525b884dbf service nova] Releasing lock "refresh_cache-2eec5d74-b1b8-4714-aaf1-687ec56ad860" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.454312] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.474699] env[68285]: DEBUG oslo_vmware.api [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892200, 'name': PowerOffVM_Task, 'duration_secs': 0.180813} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.475934] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 68aee959-4168-43a7-a8d1-e6e126a52da5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1187.477157] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1187.477326] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1187.477924] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9ef16c9-5df3-4021-8e00-c01a8708b937 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.550314] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1187.550314] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1187.550314] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Deleting the datastore file [datastore2] 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1187.550314] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c11da47-b788-4d25-95f8-38d569c50cc2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.553201] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc70aa3a-933a-48cf-ab9a-1e30dd6dbb1a tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.799s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.554619] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b 
tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.100s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.558157] env[68285]: DEBUG oslo_vmware.api [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Waiting for the task: (returnval){ [ 1187.558157] env[68285]: value = "task-2892203" [ 1187.558157] env[68285]: _type = "Task" [ 1187.558157] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.566128] env[68285]: DEBUG oslo_vmware.api [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892203, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.701491] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522cf088-ef29-f402-02d0-e904f6eed07b, 'name': SearchDatastore_Task, 'duration_secs': 0.010422} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.701799] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.702048] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1187.702294] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.702439] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1187.702620] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1187.702901] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f8b6a84-7010-4c6a-adb5-74dc5ba31bf2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.712635] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1187.712820] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1187.713597] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10d4359b-6cf4-4cf4-ae3a-944a526ee188 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.719290] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1187.719290] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529b44c1-a759-9531-32c2-143f918d5a11" [ 1187.719290] env[68285]: _type = "Task" [ 1187.719290] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.728562] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529b44c1-a759-9531-32c2-143f918d5a11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.832860] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': task-2892201, 'name': Rename_Task, 'duration_secs': 0.142863} completed successfully. 
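The Acquiring/Acquired/Releasing lock records in this log come from oslo.concurrency's lockutils. A small sketch of the two usual forms, with lock names copied from the records above; the bodies are placeholders:

    from oslo_concurrency import lockutils

    # Named lock as a context manager (the `lock /.../lockutils.py:313` records).
    with lockutils.lock('refresh_cache-2eec5d74-b1b8-4714-aaf1-687ec56ad860'):
        pass  # refresh the network info cache while holding the lock

    # Named lock as a decorator (the `inner /.../lockutils.py:405` records).
    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        pass  # claim/track resources while holding the lock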
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.833278] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1187.833916] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a851ed50-f605-4f31-a934-076b28df7e1e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.842102] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Waiting for the task: (returnval){ [ 1187.842102] env[68285]: value = "task-2892204" [ 1187.842102] env[68285]: _type = "Task" [ 1187.842102] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.850154] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': task-2892204, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.916492] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892199, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.944946] env[68285]: DEBUG nova.compute.manager [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1187.970698] env[68285]: DEBUG nova.virt.hardware [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1187.971050] env[68285]: DEBUG nova.virt.hardware [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1187.971179] env[68285]: DEBUG nova.virt.hardware [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1187.971368] env[68285]: DEBUG nova.virt.hardware [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1187.971541] env[68285]: DEBUG nova.virt.hardware [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1187.971700] env[68285]: DEBUG nova.virt.hardware [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1187.971986] env[68285]: DEBUG nova.virt.hardware [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1187.972168] env[68285]: DEBUG nova.virt.hardware [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1187.972342] env[68285]: DEBUG nova.virt.hardware [None 
req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1187.972507] env[68285]: DEBUG nova.virt.hardware [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1187.972726] env[68285]: DEBUG nova.virt.hardware [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1187.973569] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e85c14-fe14-4bcc-a8a9-594e1f48ca82 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.978704] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance ef87ff30-ef45-4abb-8696-d5493572703a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1187.978899] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Migration 022b54e2-d1a4-4903-8d15-41f4cac86f16 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1187.979049] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance fe8e0a71-e9b0-4035-a696-51455d6fc473 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.979238] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Instance with task_state "unshelving" is not being actively managed by this compute host but has allocations referencing this compute node (7bdf675d-15ae-4a4b-9c03-79d8c773b76b): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 1187.979541] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1187.979619] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1187.985580] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee8b631-8f71-4f73-a6a1-b1fc90a59eaa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.061268] env[68285]: INFO nova.compute.manager [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Detaching volume 1c482eea-25f7-44c7-bf99-c576cabd8ee0 [ 1188.074292] env[68285]: DEBUG oslo_vmware.api [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Task: {'id': task-2892203, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171024} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.078095] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1188.078361] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1188.078575] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1188.078803] env[68285]: INFO nova.compute.manager [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1188.079103] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1188.079784] env[68285]: DEBUG nova.compute.manager [-] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1188.079784] env[68285]: DEBUG nova.network.neutron [-] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1188.096870] env[68285]: INFO nova.virt.block_device [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Attempting to driver detach volume 1c482eea-25f7-44c7-bf99-c576cabd8ee0 from mountpoint /dev/sdb [ 1188.097141] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Volume detach. Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1188.097342] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581024', 'volume_id': '1c482eea-25f7-44c7-bf99-c576cabd8ee0', 'name': 'volume-1c482eea-25f7-44c7-bf99-c576cabd8ee0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7790f1e6-c73f-40d6-97af-00e9c518a09c', 'attached_at': '', 'detached_at': '', 'volume_id': '1c482eea-25f7-44c7-bf99-c576cabd8ee0', 'serial': '1c482eea-25f7-44c7-bf99-c576cabd8ee0'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1188.098311] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a18311e-b8b1-4889-9097-73d0fc21a7e6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.129558] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ffd895-475f-4d9c-800f-b873bdd39ada {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.136980] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8734f0e2-90dc-4235-8b6d-a0bade29993f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.164489] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54c6bed-f820-4faa-9481-fa8aba601ec9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.180796] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] The volume has not been displaced from its original location: [datastore2] 
volume-1c482eea-25f7-44c7-bf99-c576cabd8ee0/volume-1c482eea-25f7-44c7-bf99-c576cabd8ee0.vmdk. No consolidation needed. {{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1188.186080] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Reconfiguring VM instance instance-00000052 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1188.188930] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5fc9712-38a0-4a8b-9f99-7ebbaf4b6c44 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.210275] env[68285]: DEBUG oslo_vmware.api [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1188.210275] env[68285]: value = "task-2892205" [ 1188.210275] env[68285]: _type = "Task" [ 1188.210275] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.221425] env[68285]: DEBUG oslo_vmware.api [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892205, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.232998] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529b44c1-a759-9531-32c2-143f918d5a11, 'name': SearchDatastore_Task, 'duration_secs': 0.013263} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.232998] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6be35812-82cb-4a70-9663-38e5b98999af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.238265] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1188.238265] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5220e017-9ddb-4a46-9441-9d963c80a58e" [ 1188.238265] env[68285]: _type = "Task" [ 1188.238265] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.245539] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5220e017-9ddb-4a46-9441-9d963c80a58e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.357069] env[68285]: DEBUG oslo_vmware.api [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': task-2892204, 'name': PowerOnVM_Task, 'duration_secs': 0.507903} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.358218] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1188.358218] env[68285]: INFO nova.compute.manager [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Took 7.15 seconds to spawn the instance on the hypervisor. [ 1188.358218] env[68285]: DEBUG nova.compute.manager [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1188.359172] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b91ddf9-429d-43a6-9650-1061071c6dd7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.387705] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f7846d-fb32-4ded-913c-8d43bacc2c95 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.396358] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b3e16b3-90ee-4e0a-a683-d9034106e37d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.436051] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9fe9cb-6dc4-4835-8994-2cfa05ca9d19 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.447427] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918c6719-e3f6-4a4d-8a72-2610579a4f82 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.451961] env[68285]: DEBUG oslo_vmware.api [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892199, 'name': RemoveSnapshot_Task, 'duration_secs': 1.411475} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.452522] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1188.452749] env[68285]: INFO nova.compute.manager [None req-a1cd89cf-8090-42ef-adb8-eb4879d3ee77 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Took 19.17 seconds to snapshot the instance on the hypervisor. [ 1188.468527] env[68285]: DEBUG nova.compute.provider_tree [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1188.640725] env[68285]: DEBUG nova.network.neutron [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Successfully updated port: 593ddaf0-2e7b-40c6-9df3-82aabf50fefa {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1188.673682] env[68285]: DEBUG nova.compute.manager [req-57c53271-9c54-4abf-b9af-89e5415ba9ec req-1819e766-679c-4f1c-ab4b-ef7c97be11dc service nova] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Received event network-vif-deleted-951c2d6f-07b5-4821-b7fb-b15b90849f2a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1188.673682] env[68285]: INFO nova.compute.manager [req-57c53271-9c54-4abf-b9af-89e5415ba9ec req-1819e766-679c-4f1c-ab4b-ef7c97be11dc service nova] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Neutron deleted interface 951c2d6f-07b5-4821-b7fb-b15b90849f2a; detaching it from the instance and deleting it from the info cache [ 1188.673682] env[68285]: DEBUG nova.network.neutron [req-57c53271-9c54-4abf-b9af-89e5415ba9ec req-1819e766-679c-4f1c-ab4b-ef7c97be11dc service nova] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.720037] env[68285]: DEBUG oslo_vmware.api [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892205, 'name': ReconfigVM_Task, 'duration_secs': 0.249714} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.720314] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Reconfigured VM instance instance-00000052 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1188.724930] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b382204-9a92-4c1e-a180-28737c4ed3cd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.737534] env[68285]: DEBUG nova.compute.manager [req-12f77189-24a3-4198-8499-f151dfb90ec7 req-1bd02101-dad4-422a-9c5c-a58dc38153d4 service nova] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Received event network-vif-plugged-593ddaf0-2e7b-40c6-9df3-82aabf50fefa {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1188.737739] env[68285]: DEBUG oslo_concurrency.lockutils [req-12f77189-24a3-4198-8499-f151dfb90ec7 req-1bd02101-dad4-422a-9c5c-a58dc38153d4 service nova] Acquiring lock "fe6c495f-6917-4e3d-acce-7487a45e3ef4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.737942] env[68285]: DEBUG oslo_concurrency.lockutils [req-12f77189-24a3-4198-8499-f151dfb90ec7 req-1bd02101-dad4-422a-9c5c-a58dc38153d4 service nova] Lock "fe6c495f-6917-4e3d-acce-7487a45e3ef4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.738130] env[68285]: DEBUG oslo_concurrency.lockutils [req-12f77189-24a3-4198-8499-f151dfb90ec7 req-1bd02101-dad4-422a-9c5c-a58dc38153d4 service nova] Lock "fe6c495f-6917-4e3d-acce-7487a45e3ef4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.738299] env[68285]: DEBUG nova.compute.manager [req-12f77189-24a3-4198-8499-f151dfb90ec7 req-1bd02101-dad4-422a-9c5c-a58dc38153d4 service nova] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] No waiting events found dispatching network-vif-plugged-593ddaf0-2e7b-40c6-9df3-82aabf50fefa {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1188.738460] env[68285]: WARNING nova.compute.manager [req-12f77189-24a3-4198-8499-f151dfb90ec7 req-1bd02101-dad4-422a-9c5c-a58dc38153d4 service nova] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Received unexpected event network-vif-plugged-593ddaf0-2e7b-40c6-9df3-82aabf50fefa for instance with vm_state building and task_state spawning. [ 1188.744376] env[68285]: DEBUG oslo_vmware.api [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1188.744376] env[68285]: value = "task-2892206" [ 1188.744376] env[68285]: _type = "Task" [ 1188.744376] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.752463] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5220e017-9ddb-4a46-9441-9d963c80a58e, 'name': SearchDatastore_Task, 'duration_secs': 0.011862} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.753622] env[68285]: DEBUG nova.compute.manager [req-320c6883-d16f-44bb-b02c-26613e460f26 req-d88e3272-2a35-436a-aa12-436c66d89b6e service nova] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Received event network-changed-593ddaf0-2e7b-40c6-9df3-82aabf50fefa {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1188.753790] env[68285]: DEBUG nova.compute.manager [req-320c6883-d16f-44bb-b02c-26613e460f26 req-d88e3272-2a35-436a-aa12-436c66d89b6e service nova] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Refreshing instance network info cache due to event network-changed-593ddaf0-2e7b-40c6-9df3-82aabf50fefa. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1188.754023] env[68285]: DEBUG oslo_concurrency.lockutils [req-320c6883-d16f-44bb-b02c-26613e460f26 req-d88e3272-2a35-436a-aa12-436c66d89b6e service nova] Acquiring lock "refresh_cache-fe6c495f-6917-4e3d-acce-7487a45e3ef4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.754178] env[68285]: DEBUG oslo_concurrency.lockutils [req-320c6883-d16f-44bb-b02c-26613e460f26 req-d88e3272-2a35-436a-aa12-436c66d89b6e service nova] Acquired lock "refresh_cache-fe6c495f-6917-4e3d-acce-7487a45e3ef4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1188.754317] env[68285]: DEBUG nova.network.neutron [req-320c6883-d16f-44bb-b02c-26613e460f26 req-d88e3272-2a35-436a-aa12-436c66d89b6e service nova] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Refreshing network info cache for port 593ddaf0-2e7b-40c6-9df3-82aabf50fefa {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1188.755742] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1188.755806] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 2eec5d74-b1b8-4714-aaf1-687ec56ad860/2eec5d74-b1b8-4714-aaf1-687ec56ad860.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1188.756437] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cfad7ae4-f44d-48fe-91b2-ee34cd20a02c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.761746] 
env[68285]: DEBUG oslo_vmware.api [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892206, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.766546] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1188.766546] env[68285]: value = "task-2892207" [ 1188.766546] env[68285]: _type = "Task" [ 1188.766546] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.775959] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892207, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.878542] env[68285]: INFO nova.compute.manager [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Took 29.84 seconds to build instance. [ 1188.883242] env[68285]: DEBUG nova.network.neutron [-] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.971181] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1189.145083] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "refresh_cache-fe6c495f-6917-4e3d-acce-7487a45e3ef4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.175983] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-414b4fdb-f8f8-4c2b-9ef4-87480197f210 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.185794] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86801db-2d1f-48a4-abe3-0584425a65ca {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.222228] env[68285]: DEBUG nova.compute.manager [req-57c53271-9c54-4abf-b9af-89e5415ba9ec req-1819e766-679c-4f1c-ab4b-ef7c97be11dc service nova] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Detach interface failed, port_id=951c2d6f-07b5-4821-b7fb-b15b90849f2a, 
reason: Instance 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1189.254057] env[68285]: DEBUG oslo_vmware.api [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892206, 'name': ReconfigVM_Task, 'duration_secs': 0.143453} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.254057] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581024', 'volume_id': '1c482eea-25f7-44c7-bf99-c576cabd8ee0', 'name': 'volume-1c482eea-25f7-44c7-bf99-c576cabd8ee0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7790f1e6-c73f-40d6-97af-00e9c518a09c', 'attached_at': '', 'detached_at': '', 'volume_id': '1c482eea-25f7-44c7-bf99-c576cabd8ee0', 'serial': '1c482eea-25f7-44c7-bf99-c576cabd8ee0'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1189.277273] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892207, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.296547] env[68285]: DEBUG nova.network.neutron [req-320c6883-d16f-44bb-b02c-26613e460f26 req-d88e3272-2a35-436a-aa12-436c66d89b6e service nova] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1189.382135] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e9871207-9c5d-49c0-89b5-50c037c1f430 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Lock "9175fd25-a00c-4a2c-b779-56e6541dcaa1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.360s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.385238] env[68285]: INFO nova.compute.manager [-] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Took 1.31 seconds to deallocate network for instance. 
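
The recurring "Task: {...} progress is N%." and "completed successfully" entries above (CopyVirtualDisk_Task, ReconfigVM_Task, etc.) come from oslo.vmware's task-waiting code (wait_for_task / _poll_task), which repeatedly reads the vSphere task state until it reaches a terminal state. The following is only a minimal illustrative sketch of that polling pattern, not the actual oslo_vmware.api implementation; get_task_info() is a hypothetical stand-in for the real PropertyCollector call against vCenter.

```python
import time

# Sketch of the poll-until-done pattern behind the "progress is N%" /
# "completed successfully" log lines. Assumption: get_task_info(task_ref)
# returns a dict like {'state': 'running', 'progress': 77}; the real driver
# obtains this from the vSphere API via oslo.vmware.

POLL_INTERVAL = 0.5  # seconds between polls (the real code uses a looping call)


def wait_for_task(get_task_info, task_ref):
    """Poll a task reference until success or error and return its result."""
    while True:
        info = get_task_info(task_ref)
        if info['state'] == 'success':
            print(f"Task {task_ref} completed successfully.")
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(f"Task {task_ref} failed: {info.get('error')}")
        print(f"Task {task_ref} progress is {info.get('progress', 0)}%.")
        time.sleep(POLL_INTERVAL)


if __name__ == "__main__":
    # Fake task states purely for demonstration; task id mirrors the log format.
    states = iter([
        {'state': 'running', 'progress': 0},
        {'state': 'running', 'progress': 77},
        {'state': 'success', 'result': 'ok'},
    ])
    wait_for_task(lambda ref: next(states), 'task-2892207')
```

Under this sketch, each loop iteration corresponds to one "_poll_task" DEBUG line in the log, and the final iteration produces the "completed successfully" entry with the measured duration_secs.
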
[ 1189.390811] env[68285]: DEBUG nova.network.neutron [req-320c6883-d16f-44bb-b02c-26613e460f26 req-d88e3272-2a35-436a-aa12-436c66d89b6e service nova] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.477945] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1189.478217] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.560s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.478489] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.111s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.480121] env[68285]: INFO nova.compute.claims [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1189.782386] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892207, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.819924] env[68285]: DEBUG nova.objects.instance [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lazy-loading 'flavor' on Instance uuid 7790f1e6-c73f-40d6-97af-00e9c518a09c {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1189.893912] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.894470] env[68285]: DEBUG oslo_concurrency.lockutils [req-320c6883-d16f-44bb-b02c-26613e460f26 req-d88e3272-2a35-436a-aa12-436c66d89b6e service nova] Releasing lock "refresh_cache-fe6c495f-6917-4e3d-acce-7487a45e3ef4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1189.895483] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "refresh_cache-fe6c495f-6917-4e3d-acce-7487a45e3ef4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.895654] env[68285]: DEBUG nova.network.neutron [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1190.281049] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892207, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.10113} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.281353] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 2eec5d74-b1b8-4714-aaf1-687ec56ad860/2eec5d74-b1b8-4714-aaf1-687ec56ad860.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1190.281581] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1190.281901] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78ec9eb2-09e4-482a-9c58-282b97707b61 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.288922] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1190.288922] env[68285]: value = "task-2892208" [ 1190.288922] env[68285]: _type = "Task" [ 1190.288922] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.298106] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892208, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.458014] env[68285]: DEBUG nova.network.neutron [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1190.631201] env[68285]: DEBUG nova.network.neutron [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Updating instance_info_cache with network_info: [{"id": "593ddaf0-2e7b-40c6-9df3-82aabf50fefa", "address": "fa:16:3e:5f:c3:55", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap593ddaf0-2e", "ovs_interfaceid": "593ddaf0-2e7b-40c6-9df3-82aabf50fefa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.651808] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Acquiring lock "9175fd25-a00c-4a2c-b779-56e6541dcaa1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.651808] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Lock "9175fd25-a00c-4a2c-b779-56e6541dcaa1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.651808] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Acquiring lock "9175fd25-a00c-4a2c-b779-56e6541dcaa1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.656141] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Lock "9175fd25-a00c-4a2c-b779-56e6541dcaa1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.656141] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Lock "9175fd25-a00c-4a2c-b779-56e6541dcaa1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.656141] env[68285]: INFO nova.compute.manager [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Terminating instance [ 1190.806490] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892208, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072499} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.807613] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1190.811020] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab16234b-3e10-4436-83d6-b2c7f872b3e4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.832213] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 2eec5d74-b1b8-4714-aaf1-687ec56ad860/2eec5d74-b1b8-4714-aaf1-687ec56ad860.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1190.835199] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5a1bb8b1-c8f9-485c-8e87-33eee5e18c7b tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.281s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.836158] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c4fa8b1-172b-4166-82d4-3b304f8691e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.862580] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1190.862580] env[68285]: value = "task-2892209" [ 1190.862580] env[68285]: _type = "Task" [ 1190.862580] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.868048] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d9ac82-cb1c-4501-8f32-0b4a9cbc9ad4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.878515] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc79a49-358b-46db-9ea7-792510276359 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.882152] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892209, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.910834] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd33559-4efb-4139-ba92-326283b76a90 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.918402] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6332cfb-e7dd-47ea-bb05-c0067fd571e2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.932522] env[68285]: DEBUG nova.compute.provider_tree [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1190.972749] env[68285]: DEBUG nova.compute.manager [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1190.973714] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28b4744-e46b-4296-b27a-88b690216611 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.085833] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.086232] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.135604] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 
tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "refresh_cache-fe6c495f-6917-4e3d-acce-7487a45e3ef4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1191.136645] env[68285]: DEBUG nova.compute.manager [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Instance network_info: |[{"id": "593ddaf0-2e7b-40c6-9df3-82aabf50fefa", "address": "fa:16:3e:5f:c3:55", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap593ddaf0-2e", "ovs_interfaceid": "593ddaf0-2e7b-40c6-9df3-82aabf50fefa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1191.136645] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:c3:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '593ddaf0-2e7b-40c6-9df3-82aabf50fefa', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1191.144189] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1191.144909] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1191.145275] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd741db3-f65d-4d40-8315-940188bcd4a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.166538] env[68285]: DEBUG nova.compute.manager [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1191.167595] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1191.168187] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabf5465-74e7-45b0-b22d-64870c02663e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.176669] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1191.178198] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1920c5b-32b0-440a-9698-28483380665e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.180468] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1191.180468] env[68285]: value = "task-2892210" [ 1191.180468] env[68285]: _type = "Task" [ 1191.180468] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.188555] env[68285]: DEBUG oslo_vmware.api [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Waiting for the task: (returnval){ [ 1191.188555] env[68285]: value = "task-2892211" [ 1191.188555] env[68285]: _type = "Task" [ 1191.188555] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.192207] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892210, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.202477] env[68285]: DEBUG oslo_vmware.api [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': task-2892211, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.280253] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "801f524e-28b5-4452-b880-0fc30d3c5eef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.280253] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "801f524e-28b5-4452-b880-0fc30d3c5eef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.377504] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892209, 'name': ReconfigVM_Task, 'duration_secs': 0.347689} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.377504] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 2eec5d74-b1b8-4714-aaf1-687ec56ad860/2eec5d74-b1b8-4714-aaf1-687ec56ad860.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1191.377504] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ea9e83e3-c466-4b83-93c0-f175eee3cbe7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.385270] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1191.385270] env[68285]: value = "task-2892212" [ 1191.385270] env[68285]: _type = "Task" [ 1191.385270] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.394049] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892212, 'name': Rename_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.436758] env[68285]: DEBUG nova.scheduler.client.report [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1191.487121] env[68285]: INFO nova.compute.manager [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] instance snapshotting [ 1191.490405] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6873865-ae90-46c8-ae37-a779414a962a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.510051] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb46605f-757e-4f92-bfc0-342f1badd30a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.589526] env[68285]: INFO nova.compute.manager [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Detaching volume 3ec9591e-5da4-46d7-a97b-a79b2fb4dc42 [ 1191.638129] env[68285]: INFO nova.virt.block_device [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Attempting to driver detach volume 3ec9591e-5da4-46d7-a97b-a79b2fb4dc42 from mountpoint /dev/sdc [ 1191.638368] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Volume detach. 
Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1191.638549] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581030', 'volume_id': '3ec9591e-5da4-46d7-a97b-a79b2fb4dc42', 'name': 'volume-3ec9591e-5da4-46d7-a97b-a79b2fb4dc42', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7790f1e6-c73f-40d6-97af-00e9c518a09c', 'attached_at': '', 'detached_at': '', 'volume_id': '3ec9591e-5da4-46d7-a97b-a79b2fb4dc42', 'serial': '3ec9591e-5da4-46d7-a97b-a79b2fb4dc42'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1191.639481] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3927acea-9cc0-4c94-a80e-b2eaa0a13a04 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.663721] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8b2d1e-2fac-4b4d-b834-069aa7d2ce0c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.670964] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031567c6-9f9f-4fbd-9601-b6ef605aeb35 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.695788] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10dbe040-654c-4cd5-9c8e-3e268b49f875 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.706163] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892210, 'name': CreateVM_Task, 'duration_secs': 0.450858} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.720762] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1191.721359] env[68285]: DEBUG oslo_vmware.api [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': task-2892211, 'name': PowerOffVM_Task, 'duration_secs': 0.227613} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.721669] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] The volume has not been displaced from its original location: [datastore2] volume-3ec9591e-5da4-46d7-a97b-a79b2fb4dc42/volume-3ec9591e-5da4-46d7-a97b-a79b2fb4dc42.vmdk. No consolidation needed. 
{{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1191.726839] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Reconfiguring VM instance instance-00000052 to detach disk 2002 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1191.728744] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.728932] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.729269] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1191.729534] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1191.729695] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1191.729906] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01e14b95-0d9d-4e4d-9831-771c24f00c9a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.747672] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66c75966-ced4-40a0-99af-7c93eae74320 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.750734] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd930d74-9631-4cb3-baac-e2b81e40d4d6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.750891] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "feda1a98-3086-43a6-a887-f4d1602ca8ee" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.750999] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "feda1a98-3086-43a6-a887-f4d1602ca8ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.759149] env[68285]: DEBUG oslo_vmware.api [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1191.759149] env[68285]: value = "task-2892214" [ 1191.759149] env[68285]: _type = "Task" [ 1191.759149] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.759149] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1191.759149] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5265ff73-1482-c8ac-4d1d-05559f7b7dd7" [ 1191.759149] env[68285]: _type = "Task" [ 1191.759149] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.770663] env[68285]: DEBUG oslo_vmware.api [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892214, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.774899] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5265ff73-1482-c8ac-4d1d-05559f7b7dd7, 'name': SearchDatastore_Task, 'duration_secs': 0.010039} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.774899] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1191.774899] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1191.774899] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.775091] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.775222] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1191.775463] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45c2688c-e342-42bd-97fd-5f9ab4852212 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.783623] env[68285]: DEBUG nova.compute.manager [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1191.786441] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1191.790027] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1191.790027] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6d28862-edb1-4195-b209-2487ef3a166b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.795584] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1191.795584] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5265f6bc-5be5-6025-0e7b-e5380656d643" [ 1191.795584] env[68285]: _type = "Task" [ 1191.795584] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.807104] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5265f6bc-5be5-6025-0e7b-e5380656d643, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.821563] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1191.821937] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1191.821937] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Deleting the datastore file [datastore2] 9175fd25-a00c-4a2c-b779-56e6541dcaa1 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1191.822204] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a286d877-1a0a-4efe-9c7e-c2dbd8e341e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.828316] env[68285]: DEBUG oslo_vmware.api [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Waiting for the task: (returnval){ [ 1191.828316] env[68285]: value = "task-2892215" [ 1191.828316] env[68285]: _type = "Task" [ 1191.828316] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.836731] env[68285]: DEBUG oslo_vmware.api [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': task-2892215, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.895265] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892212, 'name': Rename_Task, 'duration_secs': 0.27917} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.895586] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1191.895843] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-135480db-afe2-460c-b567-d512a3075da2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.902361] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1191.902361] env[68285]: value = "task-2892216" [ 1191.902361] env[68285]: _type = "Task" [ 1191.902361] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.911933] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892216, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.944240] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.466s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.944930] env[68285]: DEBUG nova.compute.manager [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1191.948254] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.442s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.953019] env[68285]: INFO nova.compute.claims [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1192.020994] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1192.021329] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-838e8329-0410-480c-b3e7-07a65e8ca335 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.029140] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1192.029140] env[68285]: value = "task-2892217" [ 1192.029140] env[68285]: _type = "Task" [ 1192.029140] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.036958] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892217, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.253854] env[68285]: DEBUG nova.compute.manager [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1192.268018] env[68285]: DEBUG oslo_vmware.api [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892214, 'name': ReconfigVM_Task, 'duration_secs': 0.35684} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.268377] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Reconfigured VM instance instance-00000052 to detach disk 2002 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1192.273910] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e8c6b0c-25e8-4fdd-a31c-bdde377f8700 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.291992] env[68285]: DEBUG oslo_vmware.api [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1192.291992] env[68285]: value = "task-2892218" [ 1192.291992] env[68285]: _type = "Task" [ 1192.291992] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.305700] env[68285]: DEBUG oslo_vmware.api [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892218, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.313846] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5265f6bc-5be5-6025-0e7b-e5380656d643, 'name': SearchDatastore_Task, 'duration_secs': 0.013692} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.316508] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.316508] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51558eff-40a9-4a18-a4e7-2456ac0ea618 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.322640] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1192.322640] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d52ea8-2790-8fac-8373-1a2d10baed92" [ 1192.322640] env[68285]: _type = "Task" [ 1192.322640] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.333224] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d52ea8-2790-8fac-8373-1a2d10baed92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.341391] env[68285]: DEBUG oslo_vmware.api [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Task: {'id': task-2892215, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201427} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.341700] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1192.341945] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1192.342415] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1192.342415] env[68285]: INFO nova.compute.manager [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1192.342718] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1192.342941] env[68285]: DEBUG nova.compute.manager [-] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1192.343110] env[68285]: DEBUG nova.network.neutron [-] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1192.412428] env[68285]: DEBUG oslo_vmware.api [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892216, 'name': PowerOnVM_Task, 'duration_secs': 0.508159} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.412744] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1192.412946] env[68285]: INFO nova.compute.manager [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Took 8.79 seconds to spawn the instance on the hypervisor. [ 1192.413151] env[68285]: DEBUG nova.compute.manager [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1192.414183] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94ffd99-f0dc-49f0-8bdf-08a58bccc579 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.458634] env[68285]: DEBUG nova.compute.utils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1192.460284] env[68285]: DEBUG nova.compute.manager [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1192.462154] env[68285]: DEBUG nova.network.neutron [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1192.475012] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "9c190abd-23ee-4e8e-8b91-9050847581d5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.475261] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "9c190abd-23ee-4e8e-8b91-9050847581d5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.514823] env[68285]: DEBUG nova.policy [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '080ca112b7534d1284942bdd41514e66', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '923c0329269c41159ae4469d358fe25f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1192.541273] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892217, 'name': CreateSnapshot_Task, 'duration_secs': 0.511638} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.541600] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1192.542497] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b75cdda-44d7-4895-b9c1-1648bbe623c7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.634804] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37788f52-2999-4ceb-9f58-737ba2299ce6 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.635142] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37788f52-2999-4ceb-9f58-737ba2299ce6 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.635358] env[68285]: DEBUG nova.compute.manager [None req-37788f52-2999-4ceb-9f58-737ba2299ce6 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1192.636336] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ad048c-ac87-40bb-8857-0fcb648895e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.644634] env[68285]: DEBUG nova.compute.manager [None req-37788f52-2999-4ceb-9f58-737ba2299ce6 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68285) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1192.645626] env[68285]: DEBUG nova.objects.instance [None req-37788f52-2999-4ceb-9f58-737ba2299ce6 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lazy-loading 'flavor' on Instance uuid d1446290-95ce-4e87-85df-7cc69bb57ce7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1192.685878] env[68285]: DEBUG nova.compute.manager [req-9a0c3592-681e-4b71-9786-42ffccd66492 req-664e9084-5125-4057-a5e7-7901dc60191c service nova] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Received event network-vif-deleted-2dc847c5-91ee-448c-b47b-5c2a16f2cf1e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1192.686119] env[68285]: INFO nova.compute.manager [req-9a0c3592-681e-4b71-9786-42ffccd66492 req-664e9084-5125-4057-a5e7-7901dc60191c 
service nova] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Neutron deleted interface 2dc847c5-91ee-448c-b47b-5c2a16f2cf1e; detaching it from the instance and deleting it from the info cache [ 1192.686546] env[68285]: DEBUG nova.network.neutron [req-9a0c3592-681e-4b71-9786-42ffccd66492 req-664e9084-5125-4057-a5e7-7901dc60191c service nova] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.775667] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.799371] env[68285]: DEBUG nova.network.neutron [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Successfully created port: 47dedd89-6346-46ef-93a1-287c2727d7cc {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1192.804273] env[68285]: DEBUG oslo_vmware.api [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892218, 'name': ReconfigVM_Task, 'duration_secs': 0.171608} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.804539] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581030', 'volume_id': '3ec9591e-5da4-46d7-a97b-a79b2fb4dc42', 'name': 'volume-3ec9591e-5da4-46d7-a97b-a79b2fb4dc42', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7790f1e6-c73f-40d6-97af-00e9c518a09c', 'attached_at': '', 'detached_at': '', 'volume_id': '3ec9591e-5da4-46d7-a97b-a79b2fb4dc42', 'serial': '3ec9591e-5da4-46d7-a97b-a79b2fb4dc42'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1192.832399] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d52ea8-2790-8fac-8373-1a2d10baed92, 'name': SearchDatastore_Task, 'duration_secs': 0.010322} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.832646] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.833049] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] fe6c495f-6917-4e3d-acce-7487a45e3ef4/fe6c495f-6917-4e3d-acce-7487a45e3ef4.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1192.833162] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c4d4f8d-80b7-478a-b989-a8493dd029fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.839675] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1192.839675] env[68285]: value = "task-2892219" [ 1192.839675] env[68285]: _type = "Task" [ 1192.839675] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.847075] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892219, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.936025] env[68285]: INFO nova.compute.manager [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Took 30.98 seconds to build instance. [ 1192.964679] env[68285]: DEBUG nova.compute.manager [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1192.979398] env[68285]: INFO nova.compute.manager [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Detaching volume 91d9331a-255e-4596-9535-7bf73c4b34d3 [ 1193.019547] env[68285]: INFO nova.virt.block_device [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Attempting to driver detach volume 91d9331a-255e-4596-9535-7bf73c4b34d3 from mountpoint /dev/sdb [ 1193.019820] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Volume detach. Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1193.020047] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581006', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'name': 'volume-91d9331a-255e-4596-9535-7bf73c4b34d3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9c190abd-23ee-4e8e-8b91-9050847581d5', 'attached_at': '', 'detached_at': '', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'serial': '91d9331a-255e-4596-9535-7bf73c4b34d3'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1193.021051] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614f1424-4939-4bd8-a048-773b38975b5a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.061352] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1193.064785] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-09d2b054-ad41-49a7-9d57-86689e9d425e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.068574] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408a4a27-31c5-4cc0-be51-f7a9e94eb3f0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.079231] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58d2fc2-2cb8-4154-b40f-cff7094f9a76 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.082379] env[68285]: DEBUG oslo_vmware.api [None 
req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1193.082379] env[68285]: value = "task-2892220" [ 1193.082379] env[68285]: _type = "Task" [ 1193.082379] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.111330] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999f4927-84de-4e24-b787-edbd2d623d66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.118368] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892220, 'name': CloneVM_Task} progress is 12%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.132994] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] The volume has not been displaced from its original location: [datastore1] volume-91d9331a-255e-4596-9535-7bf73c4b34d3/volume-91d9331a-255e-4596-9535-7bf73c4b34d3.vmdk. No consolidation needed. {{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1193.138911] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Reconfiguring VM instance instance-00000049 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1193.142249] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64deb2ad-841d-446c-98ac-66082561f9d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.163886] env[68285]: DEBUG oslo_vmware.api [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1193.163886] env[68285]: value = "task-2892221" [ 1193.163886] env[68285]: _type = "Task" [ 1193.163886] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.176195] env[68285]: DEBUG nova.network.neutron [-] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.177741] env[68285]: DEBUG oslo_vmware.api [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892221, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.189366] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a5ba98c-17e9-4642-9f57-3595369d2eb2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.204946] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82328e75-b1b5-4052-9458-7112f29b5cd8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.247678] env[68285]: DEBUG nova.compute.manager [req-9a0c3592-681e-4b71-9786-42ffccd66492 req-664e9084-5125-4057-a5e7-7901dc60191c service nova] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Detach interface failed, port_id=2dc847c5-91ee-448c-b47b-5c2a16f2cf1e, reason: Instance 9175fd25-a00c-4a2c-b779-56e6541dcaa1 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1193.353645] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892219, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489858} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.355609] env[68285]: DEBUG nova.objects.instance [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lazy-loading 'flavor' on Instance uuid 7790f1e6-c73f-40d6-97af-00e9c518a09c {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1193.356928] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] fe6c495f-6917-4e3d-acce-7487a45e3ef4/fe6c495f-6917-4e3d-acce-7487a45e3ef4.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1193.357155] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1193.357622] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b947cc47-e6ba-4ebb-833c-8bcf484c4753 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.364796] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1193.364796] env[68285]: value = "task-2892222" [ 1193.364796] env[68285]: _type = "Task" [ 1193.364796] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.373102] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892222, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.389702] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a9dbf5-c05e-4833-8ac5-845cb9b192be {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.400723] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5767e202-6222-4d2f-a4b2-ec1f6e2d7af8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.437026] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eff7877-9b4d-4201-bc84-7fc5e3e97d1a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.440051] env[68285]: DEBUG oslo_concurrency.lockutils [None req-49e6d465-1a62-4357-870c-9dd1312bc094 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "2eec5d74-b1b8-4714-aaf1-687ec56ad860" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.491s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.446465] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6ffa54-8a49-4e60-b686-99b64d155da2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.462345] env[68285]: DEBUG nova.compute.provider_tree [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1193.595672] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892220, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.661090] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-37788f52-2999-4ceb-9f58-737ba2299ce6 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1193.661405] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd1d5968-5b34-46f2-ad11-8f3cb58e8bee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.669408] env[68285]: DEBUG oslo_vmware.api [None req-37788f52-2999-4ceb-9f58-737ba2299ce6 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1193.669408] env[68285]: value = "task-2892223" [ 1193.669408] env[68285]: _type = "Task" [ 1193.669408] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.672609] env[68285]: DEBUG oslo_vmware.api [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892221, 'name': ReconfigVM_Task, 'duration_secs': 0.310516} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.675411] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Reconfigured VM instance instance-00000049 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1193.680178] env[68285]: INFO nova.compute.manager [-] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Took 1.34 seconds to deallocate network for instance. [ 1193.680389] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a3cc1b4-d0d6-4f09-801e-0c7990ef4740 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.699225] env[68285]: DEBUG oslo_vmware.api [None req-37788f52-2999-4ceb-9f58-737ba2299ce6 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892223, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.704264] env[68285]: DEBUG oslo_vmware.api [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1193.704264] env[68285]: value = "task-2892224" [ 1193.704264] env[68285]: _type = "Task" [ 1193.704264] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.712623] env[68285]: DEBUG oslo_vmware.api [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892224, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.876121] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892222, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.213702} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.876392] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1193.877253] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6930fec2-a404-4a16-9b8a-2094eb403820 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.901243] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] fe6c495f-6917-4e3d-acce-7487a45e3ef4/fe6c495f-6917-4e3d-acce-7487a45e3ef4.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1193.901523] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e6b173e-4830-4122-95a1-8d7f09d759eb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.922139] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1193.922139] env[68285]: value = "task-2892225" [ 1193.922139] env[68285]: _type = "Task" [ 1193.922139] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.931659] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892225, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.965998] env[68285]: DEBUG nova.scheduler.client.report [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1193.973188] env[68285]: DEBUG nova.compute.manager [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1193.999167] env[68285]: DEBUG nova.virt.hardware [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1193.999418] env[68285]: DEBUG nova.virt.hardware [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1193.999575] env[68285]: DEBUG nova.virt.hardware [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1194.000268] env[68285]: DEBUG nova.virt.hardware [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1194.000268] env[68285]: DEBUG nova.virt.hardware [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1194.000268] 
env[68285]: DEBUG nova.virt.hardware [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1194.000268] env[68285]: DEBUG nova.virt.hardware [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1194.000416] env[68285]: DEBUG nova.virt.hardware [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1194.000531] env[68285]: DEBUG nova.virt.hardware [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1194.000689] env[68285]: DEBUG nova.virt.hardware [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1194.001286] env[68285]: DEBUG nova.virt.hardware [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1194.001723] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3878b5d7-6048-4c88-bc8d-8297deb10a49 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.008974] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606ca37b-9649-4af0-bb95-81e282144cf0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.097226] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892220, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.178892] env[68285]: DEBUG nova.compute.manager [req-8cc9f255-0516-4f23-bd12-3fb503abb0bb req-2eb27ed4-7912-460a-a273-a0929ccf3429 service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Received event network-vif-plugged-47dedd89-6346-46ef-93a1-287c2727d7cc {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1194.179123] env[68285]: DEBUG oslo_concurrency.lockutils [req-8cc9f255-0516-4f23-bd12-3fb503abb0bb req-2eb27ed4-7912-460a-a273-a0929ccf3429 service nova] Acquiring lock "68aee959-4168-43a7-a8d1-e6e126a52da5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.179330] env[68285]: DEBUG oslo_concurrency.lockutils [req-8cc9f255-0516-4f23-bd12-3fb503abb0bb req-2eb27ed4-7912-460a-a273-a0929ccf3429 service nova] Lock "68aee959-4168-43a7-a8d1-e6e126a52da5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.179496] env[68285]: DEBUG oslo_concurrency.lockutils [req-8cc9f255-0516-4f23-bd12-3fb503abb0bb req-2eb27ed4-7912-460a-a273-a0929ccf3429 service nova] Lock "68aee959-4168-43a7-a8d1-e6e126a52da5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.179773] env[68285]: DEBUG nova.compute.manager [req-8cc9f255-0516-4f23-bd12-3fb503abb0bb req-2eb27ed4-7912-460a-a273-a0929ccf3429 service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] No waiting events found dispatching network-vif-plugged-47dedd89-6346-46ef-93a1-287c2727d7cc {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1194.179815] env[68285]: WARNING nova.compute.manager [req-8cc9f255-0516-4f23-bd12-3fb503abb0bb req-2eb27ed4-7912-460a-a273-a0929ccf3429 service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Received unexpected event network-vif-plugged-47dedd89-6346-46ef-93a1-287c2727d7cc for instance with vm_state building and task_state spawning. [ 1194.183540] env[68285]: DEBUG oslo_vmware.api [None req-37788f52-2999-4ceb-9f58-737ba2299ce6 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892223, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.197058] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.213746] env[68285]: DEBUG oslo_vmware.api [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892224, 'name': ReconfigVM_Task, 'duration_secs': 0.258242} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.213899] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581006', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'name': 'volume-91d9331a-255e-4596-9535-7bf73c4b34d3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9c190abd-23ee-4e8e-8b91-9050847581d5', 'attached_at': '', 'detached_at': '', 'volume_id': '91d9331a-255e-4596-9535-7bf73c4b34d3', 'serial': '91d9331a-255e-4596-9535-7bf73c4b34d3'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1194.272891] env[68285]: DEBUG nova.network.neutron [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Successfully updated port: 47dedd89-6346-46ef-93a1-287c2727d7cc {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1194.326570] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "2eec5d74-b1b8-4714-aaf1-687ec56ad860" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.326686] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "2eec5d74-b1b8-4714-aaf1-687ec56ad860" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.327011] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "2eec5d74-b1b8-4714-aaf1-687ec56ad860-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.327253] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "2eec5d74-b1b8-4714-aaf1-687ec56ad860-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.327474] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "2eec5d74-b1b8-4714-aaf1-687ec56ad860-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.329762] env[68285]: INFO nova.compute.manager [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Terminating instance [ 1194.363765] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1d898951-e43d-435f-94ee-02c57a3ba173 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.277s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.432653] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892225, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.471092] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.471533] env[68285]: DEBUG nova.compute.manager [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1194.474325] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 16.357s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.596566] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892220, 'name': CloneVM_Task} progress is 95%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.682668] env[68285]: DEBUG oslo_vmware.api [None req-37788f52-2999-4ceb-9f58-737ba2299ce6 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892223, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.762613] env[68285]: DEBUG nova.objects.instance [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lazy-loading 'flavor' on Instance uuid 9c190abd-23ee-4e8e-8b91-9050847581d5 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1194.777417] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "refresh_cache-68aee959-4168-43a7-a8d1-e6e126a52da5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.777657] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired lock "refresh_cache-68aee959-4168-43a7-a8d1-e6e126a52da5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.777946] env[68285]: DEBUG nova.network.neutron [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1194.834211] env[68285]: DEBUG nova.compute.manager [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1194.834441] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1194.835359] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f181a92e-2f01-4d4d-9f84-bb0c487d0774 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.844957] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1194.845220] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f36e66fa-7fde-4241-9b23-870c4f06f5d6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.851927] env[68285]: DEBUG oslo_vmware.api [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1194.851927] env[68285]: value = "task-2892226" [ 1194.851927] env[68285]: _type = "Task" [ 1194.851927] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.860565] env[68285]: DEBUG oslo_vmware.api [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892226, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.933118] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892225, 'name': ReconfigVM_Task, 'duration_secs': 0.809353} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.933370] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Reconfigured VM instance instance-0000005f to attach disk [datastore2] fe6c495f-6917-4e3d-acce-7487a45e3ef4/fe6c495f-6917-4e3d-acce-7487a45e3ef4.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1194.934016] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1372e84-1089-4b0f-ae3b-bc326ea995ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.940935] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1194.940935] env[68285]: value = "task-2892227" [ 1194.940935] env[68285]: _type = "Task" [ 1194.940935] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.950481] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892227, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.979468] env[68285]: INFO nova.compute.claims [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1194.983783] env[68285]: DEBUG nova.compute.utils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1194.985134] env[68285]: DEBUG nova.compute.manager [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1194.985325] env[68285]: DEBUG nova.network.neutron [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1195.043308] env[68285]: DEBUG nova.policy [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fd0582abf8e4fff8e6f8316ba430988', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07b5865cc5804d8d98073e5d0c1449aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1195.099347] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892220, 'name': CloneVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.184015] env[68285]: DEBUG oslo_vmware.api [None req-37788f52-2999-4ceb-9f58-737ba2299ce6 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892223, 'name': PowerOffVM_Task, 'duration_secs': 1.121767} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.184260] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-37788f52-2999-4ceb-9f58-737ba2299ce6 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1195.184452] env[68285]: DEBUG nova.compute.manager [None req-37788f52-2999-4ceb-9f58-737ba2299ce6 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1195.185242] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9c1513-92b1-4bb7-9019-66772b4b9972 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.311554] env[68285]: DEBUG nova.network.neutron [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Successfully created port: 0399b6c2-519b-440a-a775-e9fb28623777 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1195.313986] env[68285]: DEBUG nova.network.neutron [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1195.362866] env[68285]: DEBUG oslo_vmware.api [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892226, 'name': PowerOffVM_Task, 'duration_secs': 0.215388} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.363151] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1195.363341] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1195.363590] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0fd0132c-f712-4c6c-9976-5b72929a0550 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.451045] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892227, 'name': Rename_Task, 'duration_secs': 0.179162} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.451045] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1195.451290] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6628f907-f33a-4d00-8c2c-70c5279915f6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.458102] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1195.458102] env[68285]: value = "task-2892229" [ 1195.458102] env[68285]: _type = "Task" [ 1195.458102] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.466132] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892229, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.470433] env[68285]: DEBUG nova.network.neutron [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Updating instance_info_cache with network_info: [{"id": "47dedd89-6346-46ef-93a1-287c2727d7cc", "address": "fa:16:3e:2c:83:3a", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47dedd89-63", "ovs_interfaceid": "47dedd89-6346-46ef-93a1-287c2727d7cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.487165] env[68285]: INFO nova.compute.resource_tracker [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating resource usage from migration 022b54e2-d1a4-4903-8d15-41f4cac86f16 [ 1195.491653] env[68285]: DEBUG nova.compute.manager [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1195.604401] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892220, 'name': CloneVM_Task, 'duration_secs': 2.059154} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.604401] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Created linked-clone VM from snapshot [ 1195.604856] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef62fc3-503a-481f-87bc-21ee74a8666c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.612591] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Uploading image 4efe1c74-2846-4e11-a589-faa9aa03604d {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1195.631639] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.631929] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.632174] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "7790f1e6-c73f-40d6-97af-00e9c518a09c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.632368] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.632542] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.636144] env[68285]: INFO nova.compute.manager [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 
tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Terminating instance [ 1195.643550] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1195.643550] env[68285]: value = "vm-581037" [ 1195.643550] env[68285]: _type = "VirtualMachine" [ 1195.643550] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1195.643550] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-56bb5ca4-5524-443b-a79f-bfdd9c2ceee7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.651564] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lease: (returnval){ [ 1195.651564] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5288dfd4-8842-d01f-3a59-7f0df5aa8e67" [ 1195.651564] env[68285]: _type = "HttpNfcLease" [ 1195.651564] env[68285]: } obtained for exporting VM: (result){ [ 1195.651564] env[68285]: value = "vm-581037" [ 1195.651564] env[68285]: _type = "VirtualMachine" [ 1195.651564] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1195.652113] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the lease: (returnval){ [ 1195.652113] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5288dfd4-8842-d01f-3a59-7f0df5aa8e67" [ 1195.652113] env[68285]: _type = "HttpNfcLease" [ 1195.652113] env[68285]: } to be ready. {{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1195.661811] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1195.661811] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5288dfd4-8842-d01f-3a59-7f0df5aa8e67" [ 1195.661811] env[68285]: _type = "HttpNfcLease" [ 1195.661811] env[68285]: } is initializing. 
{{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1195.699293] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37788f52-2999-4ceb-9f58-737ba2299ce6 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 3.064s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.715161] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1195.715382] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1195.715564] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleting the datastore file [datastore2] 2eec5d74-b1b8-4714-aaf1-687ec56ad860 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1195.715827] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c1ce01a-9770-4e1a-b6da-7ab68071e903 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.722581] env[68285]: DEBUG oslo_vmware.api [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1195.722581] env[68285]: value = "task-2892231" [ 1195.722581] env[68285]: _type = "Task" [ 1195.722581] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.733409] env[68285]: DEBUG oslo_vmware.api [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892231, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.769033] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ebb54d30-cb53-40e6-9efb-1acd33f00d80 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "9c190abd-23ee-4e8e-8b91-9050847581d5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.294s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.813449] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babb16f3-09c6-4cd2-a42c-3c5bd0bf7649 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.821473] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e9d5cc-c85d-4303-834a-1a0bf1678bb0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.852363] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146c40b1-b29d-4bdc-b385-b4eef1927ea9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.860036] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ca140f-bab1-49b9-aac4-ae2aed95a3a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.875756] env[68285]: DEBUG nova.compute.provider_tree [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1195.967701] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892229, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.973377] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Releasing lock "refresh_cache-68aee959-4168-43a7-a8d1-e6e126a52da5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.973679] env[68285]: DEBUG nova.compute.manager [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Instance network_info: |[{"id": "47dedd89-6346-46ef-93a1-287c2727d7cc", "address": "fa:16:3e:2c:83:3a", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47dedd89-63", "ovs_interfaceid": "47dedd89-6346-46ef-93a1-287c2727d7cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1195.974082] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:83:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47dedd89-6346-46ef-93a1-287c2727d7cc', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1195.981518] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1195.981771] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1195.983027] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1563bced-ce2c-4892-bcf9-589fbdc9c8ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.003710] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1196.003710] env[68285]: value = "task-2892232" [ 1196.003710] env[68285]: _type = "Task" [ 1196.003710] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.010902] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892232, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.140088] env[68285]: DEBUG nova.compute.manager [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1196.140312] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1196.141313] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c88ab07-e139-404c-8e6f-f6d60c453a57 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.150393] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1196.150698] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f1346bd-8f3c-4179-ab4c-5b6b45e6fd30 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.162297] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1196.162297] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5288dfd4-8842-d01f-3a59-7f0df5aa8e67" [ 1196.162297] env[68285]: _type = "HttpNfcLease" [ 1196.162297] env[68285]: } is ready. 
{{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1196.163750] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1196.163750] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5288dfd4-8842-d01f-3a59-7f0df5aa8e67" [ 1196.163750] env[68285]: _type = "HttpNfcLease" [ 1196.163750] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1196.164199] env[68285]: DEBUG oslo_vmware.api [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1196.164199] env[68285]: value = "task-2892233" [ 1196.164199] env[68285]: _type = "Task" [ 1196.164199] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.164992] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71a5342-1659-4158-acef-e9902cac2e5e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.176511] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5288bc59-e9a9-c0f3-e925-89c0e002b6e0/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1196.176757] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5288bc59-e9a9-c0f3-e925-89c0e002b6e0/disk-0.vmdk for reading. {{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1196.181103] env[68285]: DEBUG oslo_vmware.api [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892233, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.242578] env[68285]: DEBUG nova.compute.manager [req-530c68e7-1eba-4779-ba67-5a91eb4fe778 req-1581bf5a-46ae-4297-8929-6fc40131c26f service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Received event network-changed-47dedd89-6346-46ef-93a1-287c2727d7cc {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1196.242756] env[68285]: DEBUG nova.compute.manager [req-530c68e7-1eba-4779-ba67-5a91eb4fe778 req-1581bf5a-46ae-4297-8929-6fc40131c26f service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Refreshing instance network info cache due to event network-changed-47dedd89-6346-46ef-93a1-287c2727d7cc. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1196.242999] env[68285]: DEBUG oslo_concurrency.lockutils [req-530c68e7-1eba-4779-ba67-5a91eb4fe778 req-1581bf5a-46ae-4297-8929-6fc40131c26f service nova] Acquiring lock "refresh_cache-68aee959-4168-43a7-a8d1-e6e126a52da5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.243107] env[68285]: DEBUG oslo_concurrency.lockutils [req-530c68e7-1eba-4779-ba67-5a91eb4fe778 req-1581bf5a-46ae-4297-8929-6fc40131c26f service nova] Acquired lock "refresh_cache-68aee959-4168-43a7-a8d1-e6e126a52da5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.243274] env[68285]: DEBUG nova.network.neutron [req-530c68e7-1eba-4779-ba67-5a91eb4fe778 req-1581bf5a-46ae-4297-8929-6fc40131c26f service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Refreshing network info cache for port 47dedd89-6346-46ef-93a1-287c2727d7cc {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1196.252975] env[68285]: DEBUG oslo_vmware.api [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892231, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143796} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.252975] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1196.253265] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1196.253265] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1196.253786] env[68285]: INFO nova.compute.manager [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Took 1.42 seconds to destroy the instance on the hypervisor. [ 1196.253786] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1196.254423] env[68285]: DEBUG nova.compute.manager [-] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1196.254531] env[68285]: DEBUG nova.network.neutron [-] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1196.269484] env[68285]: DEBUG nova.objects.instance [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lazy-loading 'flavor' on Instance uuid d1446290-95ce-4e87-85df-7cc69bb57ce7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.281697] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e1a63019-7e59-4e19-9b09-d98b01500c02 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.398259] env[68285]: ERROR nova.scheduler.client.report [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [req-54669f24-1886-4be9-9f88-e24b0ab24150] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-54669f24-1886-4be9-9f88-e24b0ab24150"}]} [ 1196.415261] env[68285]: DEBUG nova.scheduler.client.report [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1196.429244] env[68285]: DEBUG nova.scheduler.client.report [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1196.429512] env[68285]: DEBUG nova.compute.provider_tree [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1196.445887] env[68285]: DEBUG nova.scheduler.client.report [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1196.472753] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892229, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.476943] env[68285]: DEBUG nova.scheduler.client.report [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1196.501261] env[68285]: DEBUG nova.compute.manager [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1196.514955] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892232, 'name': CreateVM_Task, 'duration_secs': 0.334298} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.515517] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1196.516392] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.516677] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.517366] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1196.519611] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e734824f-4d22-4cb0-be0a-6083a903831a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.531425] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1196.531425] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e842d1-c31f-da4a-4d13-5e89aa6e0b3f" [ 1196.531425] env[68285]: _type = "Task" [ 1196.531425] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.533791] env[68285]: DEBUG nova.virt.hardware [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1196.534054] env[68285]: DEBUG nova.virt.hardware [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1196.534451] env[68285]: DEBUG nova.virt.hardware [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1196.534776] env[68285]: DEBUG nova.virt.hardware [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1196.535158] env[68285]: DEBUG nova.virt.hardware [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1196.535520] env[68285]: DEBUG nova.virt.hardware [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1196.535846] env[68285]: DEBUG nova.virt.hardware [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1196.536173] env[68285]: DEBUG nova.virt.hardware [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1196.536441] env[68285]: DEBUG nova.virt.hardware [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 
tempest-ImagesTestJSON-1472763889-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1196.536688] env[68285]: DEBUG nova.virt.hardware [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1196.536964] env[68285]: DEBUG nova.virt.hardware [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1196.538208] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4ea4ed-e6f7-4ae0-abe4-a0f13e00fb8e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.553980] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e842d1-c31f-da4a-4d13-5e89aa6e0b3f, 'name': SearchDatastore_Task, 'duration_secs': 0.014353} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.556846] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.557138] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1196.557424] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.557622] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.557770] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1196.561176] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b81c7e5-473a-464c-ad68-5799dd9ce64a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.564179] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593c6c4e-de61-4cad-8c83-3dd69304bfe3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.586189] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1196.586406] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1196.587714] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7df2d3df-45a8-4fd5-a806-76491815f626 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.593246] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1196.593246] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e25e87-f466-b0d1-3440-1c4a7531a835" [ 1196.593246] env[68285]: _type = "Task" [ 1196.593246] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.601550] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e25e87-f466-b0d1-3440-1c4a7531a835, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.685946] env[68285]: DEBUG oslo_vmware.api [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892233, 'name': PowerOffVM_Task, 'duration_secs': 0.199004} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.686402] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1196.686706] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1196.687556] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ca67397-98fa-4090-a1b7-bd17255ec487 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.692610] env[68285]: DEBUG nova.compute.manager [req-98d025a5-a59b-4ff2-b72e-64d9b2411540 req-24fa9f9d-cd82-4be5-88de-a57700463dfe service nova] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Received event network-vif-deleted-20c0e4b6-0469-4732-b880-d852321ef348 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1196.692857] env[68285]: INFO nova.compute.manager [req-98d025a5-a59b-4ff2-b72e-64d9b2411540 req-24fa9f9d-cd82-4be5-88de-a57700463dfe service nova] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Neutron deleted interface 20c0e4b6-0469-4732-b880-d852321ef348; detaching it from the instance and deleting it from the info cache [ 1196.693058] env[68285]: DEBUG nova.network.neutron [req-98d025a5-a59b-4ff2-b72e-64d9b2411540 req-24fa9f9d-cd82-4be5-88de-a57700463dfe service nova] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.766541] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1196.766733] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1196.766907] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Deleting the datastore file [datastore1] 7790f1e6-c73f-40d6-97af-00e9c518a09c {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1196.767315] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd5b70cd-891e-46b5-a2d7-a4e59951b1f6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.775616] env[68285]: DEBUG oslo_vmware.api [None 
req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for the task: (returnval){ [ 1196.775616] env[68285]: value = "task-2892235" [ 1196.775616] env[68285]: _type = "Task" [ 1196.775616] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.776389] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.776639] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.777025] env[68285]: DEBUG nova.network.neutron [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1196.777025] env[68285]: DEBUG nova.objects.instance [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lazy-loading 'info_cache' on Instance uuid d1446290-95ce-4e87-85df-7cc69bb57ce7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.791652] env[68285]: DEBUG oslo_vmware.api [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892235, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.798491] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "9c190abd-23ee-4e8e-8b91-9050847581d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.799066] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "9c190abd-23ee-4e8e-8b91-9050847581d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.799295] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "9c190abd-23ee-4e8e-8b91-9050847581d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.799489] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "9c190abd-23ee-4e8e-8b91-9050847581d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.799658] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "9c190abd-23ee-4e8e-8b91-9050847581d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.802453] env[68285]: INFO nova.compute.manager [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Terminating instance [ 1196.879694] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f6094d-9c1f-4e5b-9120-958b482f1b9f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.885674] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023f2fbb-6bf3-48df-8a73-c142acc1219f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.924284] env[68285]: DEBUG nova.network.neutron [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Successfully updated port: 0399b6c2-519b-440a-a775-e9fb28623777 
{{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1196.926757] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df8e2f5-60fa-43a9-a6d0-da110322565c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.935691] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b31f093-977d-440a-aebf-bc48cf34208e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.953841] env[68285]: DEBUG nova.compute.provider_tree [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1196.971135] env[68285]: DEBUG oslo_vmware.api [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892229, 'name': PowerOnVM_Task, 'duration_secs': 1.10092} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.971428] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1196.973414] env[68285]: INFO nova.compute.manager [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Took 9.03 seconds to spawn the instance on the hypervisor. [ 1196.973414] env[68285]: DEBUG nova.compute.manager [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1196.973616] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cf077e-d6fb-453a-bf27-bd6c27e3beed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.104305] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e25e87-f466-b0d1-3440-1c4a7531a835, 'name': SearchDatastore_Task, 'duration_secs': 0.011562} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.105152] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad2b1dcf-e880-431c-af00-af32bcdbce78 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.110418] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1197.110418] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52caaaf7-9643-f1a8-80f5-ec4966dcf3b8" [ 1197.110418] env[68285]: _type = "Task" [ 1197.110418] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.118671] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52caaaf7-9643-f1a8-80f5-ec4966dcf3b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.134245] env[68285]: DEBUG nova.network.neutron [req-530c68e7-1eba-4779-ba67-5a91eb4fe778 req-1581bf5a-46ae-4297-8929-6fc40131c26f service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Updated VIF entry in instance network info cache for port 47dedd89-6346-46ef-93a1-287c2727d7cc. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1197.134633] env[68285]: DEBUG nova.network.neutron [req-530c68e7-1eba-4779-ba67-5a91eb4fe778 req-1581bf5a-46ae-4297-8929-6fc40131c26f service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Updating instance_info_cache with network_info: [{"id": "47dedd89-6346-46ef-93a1-287c2727d7cc", "address": "fa:16:3e:2c:83:3a", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47dedd89-63", "ovs_interfaceid": "47dedd89-6346-46ef-93a1-287c2727d7cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.164869] env[68285]: DEBUG nova.network.neutron [-] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.195975] env[68285]: DEBUG 
oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e192a5a1-4411-451d-9f0f-2e9d58c2e694 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.205490] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5998aea6-7140-4ead-8359-6bdc75cd7778 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.240827] env[68285]: DEBUG nova.compute.manager [req-98d025a5-a59b-4ff2-b72e-64d9b2411540 req-24fa9f9d-cd82-4be5-88de-a57700463dfe service nova] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Detach interface failed, port_id=20c0e4b6-0469-4732-b880-d852321ef348, reason: Instance 2eec5d74-b1b8-4714-aaf1-687ec56ad860 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1197.280274] env[68285]: DEBUG nova.objects.base [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1197.290941] env[68285]: DEBUG oslo_vmware.api [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Task: {'id': task-2892235, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.346877} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.291987] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1197.292565] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1197.292825] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1197.293136] env[68285]: INFO nova.compute.manager [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1197.293374] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1197.293943] env[68285]: DEBUG nova.compute.manager [-] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1197.294118] env[68285]: DEBUG nova.network.neutron [-] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1197.306648] env[68285]: DEBUG nova.compute.manager [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1197.306862] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1197.307785] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27b3412-35cc-40fc-81ce-54f0c8c158d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.315215] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1197.315475] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00f2b81f-fb57-4c95-ab66-dfa57d6228bd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.323385] env[68285]: DEBUG oslo_vmware.api [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1197.323385] env[68285]: value = "task-2892236" [ 1197.323385] env[68285]: _type = "Task" [ 1197.323385] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.333290] env[68285]: DEBUG oslo_vmware.api [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892236, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.427321] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "refresh_cache-ef87ff30-ef45-4abb-8696-d5493572703a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.428172] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "refresh_cache-ef87ff30-ef45-4abb-8696-d5493572703a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1197.428488] env[68285]: DEBUG nova.network.neutron [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1197.494159] env[68285]: INFO nova.compute.manager [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Took 29.44 seconds to build instance. [ 1197.498295] env[68285]: DEBUG nova.scheduler.client.report [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 134 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1197.498562] env[68285]: DEBUG nova.compute.provider_tree [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 134 to 135 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1197.498740] env[68285]: DEBUG nova.compute.provider_tree [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1197.622021] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 
tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52caaaf7-9643-f1a8-80f5-ec4966dcf3b8, 'name': SearchDatastore_Task, 'duration_secs': 0.010287} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.622873] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.622873] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 68aee959-4168-43a7-a8d1-e6e126a52da5/68aee959-4168-43a7-a8d1-e6e126a52da5.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1197.623200] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af7d3062-3b21-42ed-9446-4112520ffd46 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.630183] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1197.630183] env[68285]: value = "task-2892237" [ 1197.630183] env[68285]: _type = "Task" [ 1197.630183] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.638123] env[68285]: DEBUG oslo_concurrency.lockutils [req-530c68e7-1eba-4779-ba67-5a91eb4fe778 req-1581bf5a-46ae-4297-8929-6fc40131c26f service nova] Releasing lock "refresh_cache-68aee959-4168-43a7-a8d1-e6e126a52da5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.638629] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892237, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.668997] env[68285]: INFO nova.compute.manager [-] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Took 1.41 seconds to deallocate network for instance. [ 1197.834311] env[68285]: DEBUG oslo_vmware.api [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892236, 'name': PowerOffVM_Task, 'duration_secs': 0.228181} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.834846] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1197.837858] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1197.837858] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7355575f-9d81-42fc-a3fa-12eb8be26128 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.904030] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1197.904030] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1197.904030] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleting the datastore file [datastore2] 9c190abd-23ee-4e8e-8b91-9050847581d5 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1197.904030] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5243add5-52c4-4e24-904b-07425d0fe9f5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.913976] env[68285]: DEBUG oslo_vmware.api [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1197.913976] env[68285]: value = "task-2892239" [ 1197.913976] env[68285]: _type = "Task" [ 1197.913976] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.925855] env[68285]: DEBUG oslo_vmware.api [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892239, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.979033] env[68285]: DEBUG nova.network.neutron [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1197.997154] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ff311756-1792-49fc-90c5-39caa7152d88 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "fe6c495f-6917-4e3d-acce-7487a45e3ef4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.949s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.016598] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.535s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.016598] env[68285]: INFO nova.compute.manager [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Migrating [ 1198.025393] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.051s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.025766] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.029783] env[68285]: DEBUG oslo_concurrency.lockutils [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.287s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.030180] env[68285]: DEBUG oslo_concurrency.lockutils [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.035056] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.023s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.035856] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.039507] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.146s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.039969] env[68285]: DEBUG nova.objects.instance [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lazy-loading 'resources' on Instance uuid 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1198.075588] env[68285]: INFO nova.scheduler.client.report [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Deleted allocations for instance 75b9c202-b50d-4c59-b3ef-03e61225a1dc [ 1198.080563] env[68285]: INFO nova.scheduler.client.report [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleted allocations for instance d0b04097-292a-47e7-8f14-199b1650dc2c [ 1198.140437] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892237, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.176485] env[68285]: DEBUG nova.network.neutron [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance_info_cache with network_info: [{"id": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "address": "fa:16:3e:71:fc:5c", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbe7fd71-a3", "ovs_interfaceid": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.178743] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.209963] env[68285]: DEBUG nova.network.neutron [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Updating instance_info_cache with network_info: [{"id": "0399b6c2-519b-440a-a775-e9fb28623777", "address": "fa:16:3e:e2:00:ea", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0399b6c2-51", "ovs_interfaceid": "0399b6c2-519b-440a-a775-e9fb28623777", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.425218] env[68285]: DEBUG oslo_vmware.api [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892239, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.392408} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.425556] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1198.425740] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1198.425930] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1198.426146] env[68285]: INFO nova.compute.manager [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1198.426582] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1198.426623] env[68285]: DEBUG nova.compute.manager [-] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1198.426733] env[68285]: DEBUG nova.network.neutron [-] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1198.461242] env[68285]: DEBUG nova.compute.manager [req-747fd9bc-e815-4d6b-9e09-0049576e9797 req-2794212a-01c0-4a33-a558-359f072ec9ba service nova] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Received event network-vif-plugged-0399b6c2-519b-440a-a775-e9fb28623777 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1198.461484] env[68285]: DEBUG oslo_concurrency.lockutils [req-747fd9bc-e815-4d6b-9e09-0049576e9797 req-2794212a-01c0-4a33-a558-359f072ec9ba service nova] Acquiring lock "ef87ff30-ef45-4abb-8696-d5493572703a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.461680] env[68285]: DEBUG oslo_concurrency.lockutils [req-747fd9bc-e815-4d6b-9e09-0049576e9797 req-2794212a-01c0-4a33-a558-359f072ec9ba service nova] Lock "ef87ff30-ef45-4abb-8696-d5493572703a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.461865] env[68285]: DEBUG oslo_concurrency.lockutils [req-747fd9bc-e815-4d6b-9e09-0049576e9797 req-2794212a-01c0-4a33-a558-359f072ec9ba service nova] Lock "ef87ff30-ef45-4abb-8696-d5493572703a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.462099] env[68285]: DEBUG nova.compute.manager [req-747fd9bc-e815-4d6b-9e09-0049576e9797 req-2794212a-01c0-4a33-a558-359f072ec9ba service nova] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] No waiting events found dispatching network-vif-plugged-0399b6c2-519b-440a-a775-e9fb28623777 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1198.462292] env[68285]: WARNING nova.compute.manager [req-747fd9bc-e815-4d6b-9e09-0049576e9797 req-2794212a-01c0-4a33-a558-359f072ec9ba service nova] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Received unexpected event network-vif-plugged-0399b6c2-519b-440a-a775-e9fb28623777 for instance with vm_state building and task_state spawning. [ 1198.462473] env[68285]: DEBUG nova.compute.manager [req-747fd9bc-e815-4d6b-9e09-0049576e9797 req-2794212a-01c0-4a33-a558-359f072ec9ba service nova] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Received event network-changed-0399b6c2-519b-440a-a775-e9fb28623777 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1198.462640] env[68285]: DEBUG nova.compute.manager [req-747fd9bc-e815-4d6b-9e09-0049576e9797 req-2794212a-01c0-4a33-a558-359f072ec9ba service nova] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Refreshing instance network info cache due to event network-changed-0399b6c2-519b-440a-a775-e9fb28623777. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1198.462896] env[68285]: DEBUG oslo_concurrency.lockutils [req-747fd9bc-e815-4d6b-9e09-0049576e9797 req-2794212a-01c0-4a33-a558-359f072ec9ba service nova] Acquiring lock "refresh_cache-ef87ff30-ef45-4abb-8696-d5493572703a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.524952] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afaf3426-9147-4f68-b850-2e6113c30991 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.535485] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6c86d0de-fccb-4ee0-96b1-6ff1d1d5b3ed tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Suspending the VM {{(pid=68285) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1198.535857] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-aeaf1653-6e4f-46dc-83ec-f1c6d6e6272c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.549141] env[68285]: DEBUG oslo_vmware.api [None req-6c86d0de-fccb-4ee0-96b1-6ff1d1d5b3ed tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1198.549141] env[68285]: value = "task-2892240" [ 1198.549141] env[68285]: _type = "Task" [ 1198.549141] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.554864] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3007d503-b1d8-42d8-937e-4056750066c9 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "be47df2a-aee7-4275-9acb-9cf74367f503" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 37.579s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.566976] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "be47df2a-aee7-4275-9acb-9cf74367f503" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 13.618s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.567857] env[68285]: INFO nova.compute.manager [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Unshelving [ 1198.571312] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.571496] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired 
lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1198.571871] env[68285]: DEBUG nova.network.neutron [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1198.579041] env[68285]: DEBUG oslo_vmware.api [None req-6c86d0de-fccb-4ee0-96b1-6ff1d1d5b3ed tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892240, 'name': SuspendVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.589767] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ed64b08-0940-46de-8815-339805e25b34 tempest-ServerGroupTestJSON-805707541 tempest-ServerGroupTestJSON-805707541-project-member] Lock "75b9c202-b50d-4c59-b3ef-03e61225a1dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.218s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.595793] env[68285]: DEBUG oslo_concurrency.lockutils [None req-938b86bb-c78c-4fe9-9ffb-0e3ca1883cad tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "d0b04097-292a-47e7-8f14-199b1650dc2c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.706s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.645957] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892237, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.733443} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.649177] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 68aee959-4168-43a7-a8d1-e6e126a52da5/68aee959-4168-43a7-a8d1-e6e126a52da5.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1198.649643] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1198.650571] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-316a65d5-da31-46bb-9b7c-68d9420aac17 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.657961] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1198.657961] env[68285]: value = "task-2892241" [ 1198.657961] env[68285]: _type = "Task" [ 1198.657961] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.672438] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892241, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.681131] env[68285]: DEBUG oslo_concurrency.lockutils [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1198.716066] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "refresh_cache-ef87ff30-ef45-4abb-8696-d5493572703a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1198.716066] env[68285]: DEBUG nova.compute.manager [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Instance network_info: |[{"id": "0399b6c2-519b-440a-a775-e9fb28623777", "address": "fa:16:3e:e2:00:ea", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0399b6c2-51", "ovs_interfaceid": "0399b6c2-519b-440a-a775-e9fb28623777", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1198.716066] env[68285]: DEBUG oslo_concurrency.lockutils [req-747fd9bc-e815-4d6b-9e09-0049576e9797 req-2794212a-01c0-4a33-a558-359f072ec9ba service nova] Acquired lock "refresh_cache-ef87ff30-ef45-4abb-8696-d5493572703a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1198.716066] env[68285]: DEBUG nova.network.neutron [req-747fd9bc-e815-4d6b-9e09-0049576e9797 req-2794212a-01c0-4a33-a558-359f072ec9ba service nova] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Refreshing network info cache for port 0399b6c2-519b-440a-a775-e9fb28623777 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1198.716066] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:00:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '209639b9-c313-4b35-86dc-dccd744d174a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'0399b6c2-519b-440a-a775-e9fb28623777', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1198.730686] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1198.735376] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1198.736718] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ff19211-e987-4008-8e74-3f1221eca81a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.758193] env[68285]: DEBUG nova.network.neutron [-] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.767421] env[68285]: DEBUG nova.compute.manager [req-0bbce9bf-fced-4732-aff2-ac4f3365112f req-4a90aa8f-5f59-45fe-b6f7-3b32264bff05 service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Received event network-vif-deleted-569e9535-6252-4998-9567-e57ffca9a73b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1198.767421] env[68285]: INFO nova.compute.manager [req-0bbce9bf-fced-4732-aff2-ac4f3365112f req-4a90aa8f-5f59-45fe-b6f7-3b32264bff05 service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Neutron deleted interface 569e9535-6252-4998-9567-e57ffca9a73b; detaching it from the instance and deleting it from the info cache [ 1198.767421] env[68285]: DEBUG nova.network.neutron [req-0bbce9bf-fced-4732-aff2-ac4f3365112f req-4a90aa8f-5f59-45fe-b6f7-3b32264bff05 service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.770660] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1198.770660] env[68285]: value = "task-2892242" [ 1198.770660] env[68285]: _type = "Task" [ 1198.770660] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.788421] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892242, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.929486] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ef872c-df4d-495a-ae42-c86c2bf6af82 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.938852] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b07eb1a-f03a-4539-b313-898084f12b86 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.979767] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d38a941-2d2d-45f6-894b-743be9ed5c70 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.989250] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06354f85-1c29-4173-9f13-104bbdfebb31 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.009038] env[68285]: DEBUG nova.compute.provider_tree [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1199.063280] env[68285]: DEBUG oslo_vmware.api [None req-6c86d0de-fccb-4ee0-96b1-6ff1d1d5b3ed tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892240, 'name': SuspendVM_Task} progress is 45%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.170061] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892241, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.13842} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.170395] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1199.171286] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717c2112-a3ce-453a-960b-f6cd2c316f2d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.198541] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 68aee959-4168-43a7-a8d1-e6e126a52da5/68aee959-4168-43a7-a8d1-e6e126a52da5.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1199.199939] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4cbbe67-faae-4131-86c0-ed1d83947b0d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.224979] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1199.224979] env[68285]: value = "task-2892243" [ 1199.224979] env[68285]: _type = "Task" [ 1199.224979] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.234364] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892243, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.261676] env[68285]: INFO nova.compute.manager [-] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Took 1.97 seconds to deallocate network for instance. [ 1199.272670] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3a9d3e9-86a9-48fb-8c63-adeeef79a20d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.291384] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892242, 'name': CreateVM_Task, 'duration_secs': 0.430054} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.291695] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1199.294564] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b6b821-f5aa-4b32-9a67-6920a0664436 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.308545] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.309109] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1199.309183] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1199.312671] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-379de8a6-8b0a-4876-acc3-dcdba0b4e32c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.319855] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1199.319855] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5208434e-decc-3c4f-91ba-061a5ef247d6" [ 1199.319855] env[68285]: _type = "Task" [ 1199.319855] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.329677] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5208434e-decc-3c4f-91ba-061a5ef247d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.347434] env[68285]: DEBUG nova.compute.manager [req-0bbce9bf-fced-4732-aff2-ac4f3365112f req-4a90aa8f-5f59-45fe-b6f7-3b32264bff05 service nova] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Detach interface failed, port_id=569e9535-6252-4998-9567-e57ffca9a73b, reason: Instance 7790f1e6-c73f-40d6-97af-00e9c518a09c could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1199.473563] env[68285]: DEBUG nova.network.neutron [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating instance_info_cache with network_info: [{"id": "9199e860-a70a-4057-93f0-526a4c8a2ed7", "address": "fa:16:3e:6f:50:a1", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9199e860-a7", "ovs_interfaceid": "9199e860-a70a-4057-93f0-526a4c8a2ed7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.551687] env[68285]: DEBUG nova.scheduler.client.report [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 135 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1199.552195] env[68285]: DEBUG nova.compute.provider_tree [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 135 to 136 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1199.552360] env[68285]: DEBUG nova.compute.provider_tree [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1199.569827] env[68285]: DEBUG oslo_vmware.api [None req-6c86d0de-fccb-4ee0-96b1-6ff1d1d5b3ed tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892240, 'name': SuspendVM_Task, 'duration_secs': 1.018517} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.570125] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6c86d0de-fccb-4ee0-96b1-6ff1d1d5b3ed tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Suspended the VM {{(pid=68285) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1199.572948] env[68285]: DEBUG nova.compute.manager [None req-6c86d0de-fccb-4ee0-96b1-6ff1d1d5b3ed tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1199.572948] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de8715a-414e-4e50-b3d2-ed06aedaeae5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.605704] env[68285]: DEBUG nova.network.neutron [-] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.611797] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.647475] env[68285]: DEBUG nova.network.neutron [req-747fd9bc-e815-4d6b-9e09-0049576e9797 req-2794212a-01c0-4a33-a558-359f072ec9ba service nova] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Updated VIF entry in instance network info cache for port 0399b6c2-519b-440a-a775-e9fb28623777. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1199.647475] env[68285]: DEBUG nova.network.neutron [req-747fd9bc-e815-4d6b-9e09-0049576e9797 req-2794212a-01c0-4a33-a558-359f072ec9ba service nova] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Updating instance_info_cache with network_info: [{"id": "0399b6c2-519b-440a-a775-e9fb28623777", "address": "fa:16:3e:e2:00:ea", "network": {"id": "528fb0dd-bf48-4e2b-a5e8-9eda19d349e0", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1888988435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07b5865cc5804d8d98073e5d0c1449aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0399b6c2-51", "ovs_interfaceid": "0399b6c2-519b-440a-a775-e9fb28623777", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.692285] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1199.692285] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67e25052-b52a-4fb4-adde-341beaac826b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.698364] env[68285]: DEBUG oslo_vmware.api [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1199.698364] env[68285]: value = "task-2892244" [ 1199.698364] env[68285]: _type = "Task" [ 1199.698364] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.708095] env[68285]: DEBUG oslo_vmware.api [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892244, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.735062] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892243, 'name': ReconfigVM_Task, 'duration_secs': 0.443811} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.735379] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 68aee959-4168-43a7-a8d1-e6e126a52da5/68aee959-4168-43a7-a8d1-e6e126a52da5.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1199.736396] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28e728e6-c96b-47f5-a639-6c042e8ace4d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.747407] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1199.747407] env[68285]: value = "task-2892245" [ 1199.747407] env[68285]: _type = "Task" [ 1199.747407] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.758977] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892245, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.770183] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.831610] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5208434e-decc-3c4f-91ba-061a5ef247d6, 'name': SearchDatastore_Task, 'duration_secs': 0.014708} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.832028] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1199.832292] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1199.832529] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.832670] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1199.832842] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1199.833169] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a571ea3f-1cf4-4aba-ac8f-42b98c30d000 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.844095] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1199.844095] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1199.844095] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f2d6b69-df41-4fa4-b9b7-02df1b3aa368 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.853724] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1199.853724] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524e1990-7c84-7a3e-ce37-c77e1ff2eb82" [ 1199.853724] env[68285]: _type = "Task" [ 1199.853724] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.864761] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524e1990-7c84-7a3e-ce37-c77e1ff2eb82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.978295] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1200.060264] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.020s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.063892] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.748s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.066400] env[68285]: INFO nova.compute.claims [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1200.086471] env[68285]: INFO nova.scheduler.client.report [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Deleted allocations for instance 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d [ 1200.109024] env[68285]: INFO nova.compute.manager [-] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Took 1.68 seconds to deallocate network for instance. 
[ 1200.149285] env[68285]: DEBUG oslo_concurrency.lockutils [req-747fd9bc-e815-4d6b-9e09-0049576e9797 req-2794212a-01c0-4a33-a558-359f072ec9ba service nova] Releasing lock "refresh_cache-ef87ff30-ef45-4abb-8696-d5493572703a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1200.209904] env[68285]: DEBUG oslo_vmware.api [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892244, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.258616] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892245, 'name': Rename_Task, 'duration_secs': 0.190865} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.258913] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1200.259198] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2165ea51-0eea-4692-861b-ebcaa90f9bba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.267216] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1200.267216] env[68285]: value = "task-2892246" [ 1200.267216] env[68285]: _type = "Task" [ 1200.267216] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.275343] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892246, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.364061] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524e1990-7c84-7a3e-ce37-c77e1ff2eb82, 'name': SearchDatastore_Task, 'duration_secs': 0.014322} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.364890] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea7d785d-f922-44e2-b7c2-1b0d2e026766 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.370878] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1200.370878] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521cfb6e-0490-7b1f-b479-a855300982ae" [ 1200.370878] env[68285]: _type = "Task" [ 1200.370878] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.379081] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521cfb6e-0490-7b1f-b479-a855300982ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.607115] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7b6c2005-251b-4809-8583-5434a747e8da tempest-AttachInterfacesV270Test-2099827642 tempest-AttachInterfacesV270Test-2099827642-project-member] Lock "08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.187s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.622079] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.712109] env[68285]: DEBUG oslo_vmware.api [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892244, 'name': PowerOnVM_Task, 'duration_secs': 0.847525} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.712109] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1200.712109] env[68285]: DEBUG nova.compute.manager [None req-31cc9bf4-677f-4370-a257-23d1863f61ff tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1200.712268] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339a6c57-67a4-42e3-b0c0-be8727fd349f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.778360] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892246, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.882059] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521cfb6e-0490-7b1f-b479-a855300982ae, 'name': SearchDatastore_Task, 'duration_secs': 0.024535} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.883473] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1200.883780] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] ef87ff30-ef45-4abb-8696-d5493572703a/ef87ff30-ef45-4abb-8696-d5493572703a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1200.885037] env[68285]: DEBUG nova.compute.manager [req-b6ecb19a-b655-4e37-ac8e-29dc928b0b0d req-3d57ddb5-e12e-40fb-85e4-652678577587 service nova] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Received event network-vif-deleted-a9d34554-5a11-451d-b371-8a0cdfc63de6 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1200.885333] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67fcd565-6b2e-46f6-86c6-c2ad3b960ac6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.893120] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1200.893120] env[68285]: value = "task-2892247" [ 1200.893120] env[68285]: _type = "Task" [ 1200.893120] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.904442] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892247, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.278169] env[68285]: DEBUG oslo_vmware.api [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892246, 'name': PowerOnVM_Task, 'duration_secs': 0.665828} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.284802] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1201.286523] env[68285]: INFO nova.compute.manager [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Took 7.31 seconds to spawn the instance on the hypervisor. 
[ 1201.290337] env[68285]: DEBUG nova.compute.manager [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1201.290337] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbdd76ec-c682-4110-afd0-db729b78482e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.299788] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "fe6c495f-6917-4e3d-acce-7487a45e3ef4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.300172] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "fe6c495f-6917-4e3d-acce-7487a45e3ef4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.300408] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "fe6c495f-6917-4e3d-acce-7487a45e3ef4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.300617] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "fe6c495f-6917-4e3d-acce-7487a45e3ef4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.300860] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "fe6c495f-6917-4e3d-acce-7487a45e3ef4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.303367] env[68285]: INFO nova.compute.manager [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Terminating instance [ 1201.406808] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892247, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.418874] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b2c42a-0a46-4f03-9ca8-37eec38f60bd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.428804] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ee9974-c527-43f8-b20d-a95be16c5455 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.463215] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec463c4f-d979-4ae9-9282-b0490053c93b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.471682] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423891e9-f2a0-4ad5-b00d-81862c24dbe3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.487486] env[68285]: DEBUG nova.compute.provider_tree [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1201.493660] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23752910-39af-49cc-94a4-7c06e0ca7db5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.513932] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating instance 'fe8e0a71-e9b0-4035-a696-51455d6fc473' progress to 0 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1201.811599] env[68285]: INFO nova.compute.manager [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Took 27.46 seconds to build instance. [ 1201.815331] env[68285]: DEBUG nova.compute.manager [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1201.815949] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1201.816771] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a03cbb2-1d91-4981-ab83-2b0f9421efda {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.824161] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1201.824946] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a31121f5-3f42-4a35-8f10-92630cff6b7c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.900108] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1201.900382] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1201.900567] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleting the datastore file [datastore2] fe6c495f-6917-4e3d-acce-7487a45e3ef4 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.903815] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61fd2e0c-1802-4a27-9314-f40655a223ae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.905588] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892247, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.910546] env[68285]: DEBUG oslo_vmware.api [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1201.910546] env[68285]: value = "task-2892249" [ 1201.910546] env[68285]: _type = "Task" [ 1201.910546] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.918786] env[68285]: DEBUG oslo_vmware.api [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892249, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.993105] env[68285]: DEBUG nova.scheduler.client.report [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1202.021023] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1202.021023] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f11e92e7-2f4b-4f6a-8601-e9481bd302fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.029052] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1202.029052] env[68285]: value = "task-2892250" [ 1202.029052] env[68285]: _type = "Task" [ 1202.029052] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.036937] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892250, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.313604] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c133fe12-2aba-4e66-98fe-7492a59f4ee1 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "68aee959-4168-43a7-a8d1-e6e126a52da5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.974s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.406201] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892247, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.419654] env[68285]: DEBUG oslo_vmware.api [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892249, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.426817} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.420090] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.420312] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1202.420565] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1202.420763] env[68285]: INFO nova.compute.manager [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1202.421107] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1202.421359] env[68285]: DEBUG nova.compute.manager [-] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1202.421484] env[68285]: DEBUG nova.network.neutron [-] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1202.500027] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.434s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.500027] env[68285]: DEBUG nova.compute.manager [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1202.500756] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.725s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.502245] env[68285]: INFO nova.compute.claims [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1202.544911] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892250, 'name': PowerOffVM_Task, 'duration_secs': 0.259813} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.546255] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1202.546452] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating instance 'fe8e0a71-e9b0-4035-a696-51455d6fc473' progress to 17 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1202.909405] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892247, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.758545} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.909682] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] ef87ff30-ef45-4abb-8696-d5493572703a/ef87ff30-ef45-4abb-8696-d5493572703a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1202.909894] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1202.910294] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bfb3b391-7db5-4809-80ac-08d367b07e52 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.917594] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1202.917594] env[68285]: value = "task-2892251" [ 1202.917594] env[68285]: _type = "Task" [ 1202.917594] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.925520] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892251, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.006764] env[68285]: DEBUG nova.compute.utils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1203.010767] env[68285]: DEBUG nova.compute.manager [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1203.010999] env[68285]: DEBUG nova.network.neutron [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1203.049188] env[68285]: DEBUG nova.policy [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9aef349348af4f138b71a8b257300b03', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0142f80018fe4d41830f10307dd482f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1203.057393] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1203.057603] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1203.057759] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1203.057939] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1203.059389] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1203.059582] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 
tempest-ServerDiskConfigTestJSON-1313847802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1203.059801] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1203.059962] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1203.060148] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1203.060306] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1203.060474] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1203.069122] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffdc87af-f301-4f2d-9944-091240e9d5d4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.088314] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1203.088314] env[68285]: value = "task-2892252" [ 1203.088314] env[68285]: _type = "Task" [ 1203.088314] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.098124] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892252, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.386992] env[68285]: DEBUG nova.network.neutron [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Successfully created port: 1d10105d-1754-49c2-9593-7de22107732e {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1203.429298] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892251, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.177385} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.430397] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1203.431350] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d15977-47fb-4e93-83cc-efe5659a6bcd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.455195] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] ef87ff30-ef45-4abb-8696-d5493572703a/ef87ff30-ef45-4abb-8696-d5493572703a.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1203.456084] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30a829ac-439d-4cd6-b682-161cb2e932bb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.480422] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1203.480422] env[68285]: value = "task-2892253" [ 1203.480422] env[68285]: _type = "Task" [ 1203.480422] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.495675] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892253, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.514024] env[68285]: DEBUG nova.compute.manager [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1203.544494] env[68285]: DEBUG nova.compute.manager [req-6d66f928-5457-4a8e-8a58-d2dc64ef33dd req-f73a7dc5-5b3a-44d8-a7a2-465529a45f05 service nova] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Received event network-vif-deleted-593ddaf0-2e7b-40c6-9df3-82aabf50fefa {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1203.544494] env[68285]: INFO nova.compute.manager [req-6d66f928-5457-4a8e-8a58-d2dc64ef33dd req-f73a7dc5-5b3a-44d8-a7a2-465529a45f05 service nova] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Neutron deleted interface 593ddaf0-2e7b-40c6-9df3-82aabf50fefa; detaching it from the instance and deleting it from the info cache [ 1203.544494] env[68285]: DEBUG nova.network.neutron [req-6d66f928-5457-4a8e-8a58-d2dc64ef33dd req-f73a7dc5-5b3a-44d8-a7a2-465529a45f05 service nova] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.598253] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892252, 'name': ReconfigVM_Task, 'duration_secs': 0.301838} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.601367] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating instance 'fe8e0a71-e9b0-4035-a696-51455d6fc473' progress to 33 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1203.805062] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf479266-eece-4681-a81a-6a17654ed444 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.814968] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956b0a24-5bf2-495f-b68b-e37a1e375de1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.853945] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172ffb9d-379f-4dea-b691-be6368bc5474 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.862830] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d821c118-9916-4f66-9dbb-faa1a9a262bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.878798] env[68285]: DEBUG nova.compute.provider_tree [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1203.918419] env[68285]: DEBUG nova.network.neutron [-] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Updating 
instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.990162] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892253, 'name': ReconfigVM_Task, 'duration_secs': 0.470731} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.990457] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Reconfigured VM instance instance-00000061 to attach disk [datastore1] ef87ff30-ef45-4abb-8696-d5493572703a/ef87ff30-ef45-4abb-8696-d5493572703a.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1203.991193] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-482c4486-77b8-49e7-9b2f-b13b5c8422be {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.998054] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1203.998054] env[68285]: value = "task-2892254" [ 1203.998054] env[68285]: _type = "Task" [ 1203.998054] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.008666] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892254, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.049840] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8adffee5-b72e-43ad-a3ca-b9d9cfd3295d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.060023] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a914bc5a-658b-4903-852b-2db13caa46fd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.097957] env[68285]: DEBUG nova.compute.manager [req-6d66f928-5457-4a8e-8a58-d2dc64ef33dd req-f73a7dc5-5b3a-44d8-a7a2-465529a45f05 service nova] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Detach interface failed, port_id=593ddaf0-2e7b-40c6-9df3-82aabf50fefa, reason: Instance fe6c495f-6917-4e3d-acce-7487a45e3ef4 could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1204.108256] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1204.108517] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1204.108710] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1204.108960] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1204.109154] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1204.109312] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1204.109515] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1204.109669] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1204.109831] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 
tempest-ServerDiskConfigTestJSON-1313847802-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1204.109989] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1204.110176] env[68285]: DEBUG nova.virt.hardware [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1204.115758] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Reconfiguring VM instance instance-0000005b to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1204.116307] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aee6a285-4bc1-44ee-bcd0-ecf290ec4100 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.135292] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1204.135292] env[68285]: value = "task-2892255" [ 1204.135292] env[68285]: _type = "Task" [ 1204.135292] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.143940] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892255, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.382164] env[68285]: DEBUG nova.scheduler.client.report [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1204.420879] env[68285]: INFO nova.compute.manager [-] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Took 2.00 seconds to deallocate network for instance. 
[ 1204.508070] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892254, 'name': Rename_Task, 'duration_secs': 0.274849} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.508347] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1204.508581] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81a7daad-82db-459f-b6af-17098b62b386 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.515526] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1204.515526] env[68285]: value = "task-2892256" [ 1204.515526] env[68285]: _type = "Task" [ 1204.515526] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.524620] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892256, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.527188] env[68285]: DEBUG nova.compute.manager [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1204.558397] env[68285]: DEBUG nova.virt.hardware [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1204.558648] env[68285]: DEBUG nova.virt.hardware [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1204.558806] env[68285]: DEBUG nova.virt.hardware [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1204.559060] env[68285]: DEBUG nova.virt.hardware [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1204.559408] env[68285]: DEBUG nova.virt.hardware [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1204.559408] env[68285]: DEBUG nova.virt.hardware [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1204.559581] env[68285]: DEBUG nova.virt.hardware [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1204.559742] env[68285]: DEBUG nova.virt.hardware [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1204.559906] env[68285]: DEBUG nova.virt.hardware [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1204.560081] env[68285]: DEBUG nova.virt.hardware [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1204.560260] env[68285]: DEBUG nova.virt.hardware [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1204.562036] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52cce72a-2387-4c87-aebb-fc3a6008a928 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.571045] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6521b7-0994-4759-b054-b279572f2da5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.645546] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892255, 'name': ReconfigVM_Task, 'duration_secs': 0.192457} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.645837] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Reconfigured VM instance instance-0000005b to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1204.646646] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002d8950-dd8c-4fd8-b353-1b5cf50a4287 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.670029] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] fe8e0a71-e9b0-4035-a696-51455d6fc473/fe8e0a71-e9b0-4035-a696-51455d6fc473.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1204.670029] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f85c4e1e-b642-4e9f-95d0-ba28b3f8d56c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.688537] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1204.688537] env[68285]: value = "task-2892257" [ 1204.688537] env[68285]: _type = "Task" [ 1204.688537] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.697285] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892257, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.887240] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.386s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1204.887956] env[68285]: DEBUG nova.compute.manager [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1204.893503] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.694s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1204.894202] env[68285]: DEBUG nova.objects.instance [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Lazy-loading 'resources' on Instance uuid 9175fd25-a00c-4a2c-b779-56e6541dcaa1 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1204.930371] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.029031] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892256, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.045644] env[68285]: DEBUG nova.compute.manager [req-565d1918-b710-4e75-a54c-ac5ca43cf574 req-f3a871e5-ac9e-4bb8-844b-1a5d8b66eaa8 service nova] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Received event network-vif-plugged-1d10105d-1754-49c2-9593-7de22107732e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1205.045951] env[68285]: DEBUG oslo_concurrency.lockutils [req-565d1918-b710-4e75-a54c-ac5ca43cf574 req-f3a871e5-ac9e-4bb8-844b-1a5d8b66eaa8 service nova] Acquiring lock "801f524e-28b5-4452-b880-0fc30d3c5eef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.046102] env[68285]: DEBUG oslo_concurrency.lockutils [req-565d1918-b710-4e75-a54c-ac5ca43cf574 req-f3a871e5-ac9e-4bb8-844b-1a5d8b66eaa8 service nova] Lock "801f524e-28b5-4452-b880-0fc30d3c5eef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.046241] env[68285]: DEBUG oslo_concurrency.lockutils [req-565d1918-b710-4e75-a54c-ac5ca43cf574 req-f3a871e5-ac9e-4bb8-844b-1a5d8b66eaa8 service nova] Lock "801f524e-28b5-4452-b880-0fc30d3c5eef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.046455] env[68285]: DEBUG nova.compute.manager [req-565d1918-b710-4e75-a54c-ac5ca43cf574 req-f3a871e5-ac9e-4bb8-844b-1a5d8b66eaa8 service nova] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] No waiting events found dispatching network-vif-plugged-1d10105d-1754-49c2-9593-7de22107732e {{(pid=68285) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1205.046644] env[68285]: WARNING nova.compute.manager [req-565d1918-b710-4e75-a54c-ac5ca43cf574 req-f3a871e5-ac9e-4bb8-844b-1a5d8b66eaa8 service nova] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Received unexpected event network-vif-plugged-1d10105d-1754-49c2-9593-7de22107732e for instance with vm_state building and task_state spawning. [ 1205.134747] env[68285]: DEBUG nova.network.neutron [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Successfully updated port: 1d10105d-1754-49c2-9593-7de22107732e {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1205.198606] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892257, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.397044] env[68285]: DEBUG nova.compute.utils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1205.403697] env[68285]: DEBUG nova.compute.manager [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1205.403697] env[68285]: DEBUG nova.network.neutron [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1205.454850] env[68285]: DEBUG nova.policy [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9aef349348af4f138b71a8b257300b03', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0142f80018fe4d41830f10307dd482f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1205.528443] env[68285]: DEBUG oslo_vmware.api [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892256, 'name': PowerOnVM_Task, 'duration_secs': 0.934406} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.531859] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1205.532487] env[68285]: INFO nova.compute.manager [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Took 9.03 seconds to spawn the instance on the hypervisor. [ 1205.532738] env[68285]: DEBUG nova.compute.manager [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1205.534124] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0610e7-014f-4663-86f0-631ca5a7db11 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.580131] env[68285]: DEBUG nova.compute.manager [req-08bee5a9-1291-4bf3-941d-01970e18fd09 req-0dd5074c-eca2-4983-9a13-9559daff19cc service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Received event network-changed-47dedd89-6346-46ef-93a1-287c2727d7cc {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1205.580131] env[68285]: DEBUG nova.compute.manager [req-08bee5a9-1291-4bf3-941d-01970e18fd09 req-0dd5074c-eca2-4983-9a13-9559daff19cc service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Refreshing instance network info cache due to event network-changed-47dedd89-6346-46ef-93a1-287c2727d7cc. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1205.580131] env[68285]: DEBUG oslo_concurrency.lockutils [req-08bee5a9-1291-4bf3-941d-01970e18fd09 req-0dd5074c-eca2-4983-9a13-9559daff19cc service nova] Acquiring lock "refresh_cache-68aee959-4168-43a7-a8d1-e6e126a52da5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.580131] env[68285]: DEBUG oslo_concurrency.lockutils [req-08bee5a9-1291-4bf3-941d-01970e18fd09 req-0dd5074c-eca2-4983-9a13-9559daff19cc service nova] Acquired lock "refresh_cache-68aee959-4168-43a7-a8d1-e6e126a52da5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1205.580131] env[68285]: DEBUG nova.network.neutron [req-08bee5a9-1291-4bf3-941d-01970e18fd09 req-0dd5074c-eca2-4983-9a13-9559daff19cc service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Refreshing network info cache for port 47dedd89-6346-46ef-93a1-287c2727d7cc {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1205.636676] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "refresh_cache-801f524e-28b5-4452-b880-0fc30d3c5eef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.636676] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "refresh_cache-801f524e-28b5-4452-b880-0fc30d3c5eef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1205.636676] env[68285]: DEBUG nova.network.neutron [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1205.700459] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892257, 'name': ReconfigVM_Task, 'duration_secs': 0.543556} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.701616] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Reconfigured VM instance instance-0000005b to attach disk [datastore2] fe8e0a71-e9b0-4035-a696-51455d6fc473/fe8e0a71-e9b0-4035-a696-51455d6fc473.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1205.701913] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating instance 'fe8e0a71-e9b0-4035-a696-51455d6fc473' progress to 50 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1205.705603] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4769a710-ad5c-4d42-a02c-39a746ae266a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.716527] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ee74ca-47b7-4b5a-ba43-12f1c0ad8a32 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.751902] env[68285]: DEBUG nova.network.neutron [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Successfully created port: 4ee7857e-7e56-4be9-bc5b-a3963713b734 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1205.754456] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519d8ff0-f747-4b1d-b69f-a67cc8e1b138 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.763553] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f6d952-c4bd-4bea-b53e-a3c01a1b9d0b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.777674] env[68285]: DEBUG nova.compute.provider_tree [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.905295] env[68285]: DEBUG nova.compute.manager [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1206.051550] env[68285]: INFO nova.compute.manager [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Took 28.57 seconds to build instance. [ 1206.177161] env[68285]: DEBUG nova.network.neutron [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1206.214834] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a73ff5-8c3c-458e-b6a6-e30902185f56 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.245593] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b7d485-761d-4ad4-b465-fad8537fceb9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.265288] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating instance 'fe8e0a71-e9b0-4035-a696-51455d6fc473' progress to 67 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1206.281960] env[68285]: DEBUG nova.scheduler.client.report [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1206.364249] env[68285]: DEBUG nova.network.neutron [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Updating instance_info_cache with network_info: [{"id": "1d10105d-1754-49c2-9593-7de22107732e", "address": "fa:16:3e:38:96:08", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d10105d-17", "ovs_interfaceid": "1d10105d-1754-49c2-9593-7de22107732e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.553754] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ea545583-e5ae-479d-8281-793833a6a301 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "ef87ff30-ef45-4abb-8696-d5493572703a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.087s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.587367] env[68285]: DEBUG nova.network.neutron [req-08bee5a9-1291-4bf3-941d-01970e18fd09 req-0dd5074c-eca2-4983-9a13-9559daff19cc service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Updated VIF entry in instance network info cache for port 47dedd89-6346-46ef-93a1-287c2727d7cc. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1206.587832] env[68285]: DEBUG nova.network.neutron [req-08bee5a9-1291-4bf3-941d-01970e18fd09 req-0dd5074c-eca2-4983-9a13-9559daff19cc service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Updating instance_info_cache with network_info: [{"id": "47dedd89-6346-46ef-93a1-287c2727d7cc", "address": "fa:16:3e:2c:83:3a", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47dedd89-63", "ovs_interfaceid": "47dedd89-6346-46ef-93a1-287c2727d7cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.646869] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5288bc59-e9a9-c0f3-e925-89c0e002b6e0/disk-0.vmdk. 
{{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1206.647779] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af1b8e5-d55d-40c7-a78a-69ee897191ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.655025] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5288bc59-e9a9-c0f3-e925-89c0e002b6e0/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1206.655160] env[68285]: ERROR oslo_vmware.rw_handles [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5288bc59-e9a9-c0f3-e925-89c0e002b6e0/disk-0.vmdk due to incomplete transfer. [ 1206.655303] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-efe58d50-9de4-4a8e-b521-6ef05bfa9c2b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.662988] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5288bc59-e9a9-c0f3-e925-89c0e002b6e0/disk-0.vmdk. {{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1206.663384] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Uploaded image 4efe1c74-2846-4e11-a589-faa9aa03604d to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1206.666050] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1206.666176] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c12ad710-e0b8-450e-be1c-c16af347a23a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.673424] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1206.673424] env[68285]: value = "task-2892258" [ 1206.673424] env[68285]: _type = "Task" [ 1206.673424] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.681745] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892258, 'name': Destroy_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.787139] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.896s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.789426] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.611s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1206.789667] env[68285]: DEBUG nova.objects.instance [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lazy-loading 'resources' on Instance uuid 2eec5d74-b1b8-4714-aaf1-687ec56ad860 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1206.812044] env[68285]: INFO nova.scheduler.client.report [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Deleted allocations for instance 9175fd25-a00c-4a2c-b779-56e6541dcaa1 [ 1206.850239] env[68285]: DEBUG nova.network.neutron [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Port 9199e860-a70a-4057-93f0-526a4c8a2ed7 binding to destination host cpu-1 is already ACTIVE {{(pid=68285) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1206.866931] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "refresh_cache-801f524e-28b5-4452-b880-0fc30d3c5eef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1206.867382] env[68285]: DEBUG nova.compute.manager [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Instance network_info: |[{"id": "1d10105d-1754-49c2-9593-7de22107732e", "address": "fa:16:3e:38:96:08", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d10105d-17", "ovs_interfaceid": "1d10105d-1754-49c2-9593-7de22107732e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1206.867717] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:96:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a15de394-0367-4921-a5c1-6ac8615e3283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d10105d-1754-49c2-9593-7de22107732e', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1206.877518] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Creating folder: Project (0142f80018fe4d41830f10307dd482f4). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1206.878969] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-694752d7-8133-41ff-ae69-9bf59613c662 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.894024] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Created folder: Project (0142f80018fe4d41830f10307dd482f4) in parent group-v580775. [ 1206.894024] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Creating folder: Instances. Parent ref: group-v581040. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1206.894024] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3967eb63-174b-4d92-9f86-fa72c264cc88 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.902730] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Created folder: Instances in parent group-v581040. 
[ 1206.904693] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1206.904693] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1206.904693] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2673e72c-9799-496c-b8b7-6f28c9eec7fa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.923407] env[68285]: DEBUG nova.compute.manager [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1206.933226] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1206.933226] env[68285]: value = "task-2892261" [ 1206.933226] env[68285]: _type = "Task" [ 1206.933226] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.943383] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892261, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.966243] env[68285]: DEBUG nova.virt.hardware [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1206.966243] env[68285]: DEBUG nova.virt.hardware [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1206.966243] env[68285]: DEBUG nova.virt.hardware [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1206.966243] env[68285]: DEBUG nova.virt.hardware [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1206.966243] env[68285]: DEBUG nova.virt.hardware [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1206.966243] env[68285]: DEBUG nova.virt.hardware [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1206.966243] env[68285]: DEBUG nova.virt.hardware [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1206.966243] env[68285]: DEBUG nova.virt.hardware [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1206.966946] env[68285]: DEBUG nova.virt.hardware [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1206.970017] env[68285]: DEBUG nova.virt.hardware [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1206.970017] env[68285]: DEBUG nova.virt.hardware [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1206.970017] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09d8024-0b53-4282-99c7-51b20e4cc595 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.978088] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51e8a97-a86c-4e6c-821f-d9ed11d21426 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.093647] env[68285]: DEBUG oslo_concurrency.lockutils [req-08bee5a9-1291-4bf3-941d-01970e18fd09 
req-0dd5074c-eca2-4983-9a13-9559daff19cc service nova] Releasing lock "refresh_cache-68aee959-4168-43a7-a8d1-e6e126a52da5" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.113898] env[68285]: DEBUG nova.compute.manager [req-de94e27f-2a92-4a4c-b8e4-b5c89d190951 req-beef358d-4a52-46c3-86b8-0ba52a34311e service nova] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Received event network-changed-1d10105d-1754-49c2-9593-7de22107732e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1207.114114] env[68285]: DEBUG nova.compute.manager [req-de94e27f-2a92-4a4c-b8e4-b5c89d190951 req-beef358d-4a52-46c3-86b8-0ba52a34311e service nova] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Refreshing instance network info cache due to event network-changed-1d10105d-1754-49c2-9593-7de22107732e. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1207.114369] env[68285]: DEBUG oslo_concurrency.lockutils [req-de94e27f-2a92-4a4c-b8e4-b5c89d190951 req-beef358d-4a52-46c3-86b8-0ba52a34311e service nova] Acquiring lock "refresh_cache-801f524e-28b5-4452-b880-0fc30d3c5eef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.114518] env[68285]: DEBUG oslo_concurrency.lockutils [req-de94e27f-2a92-4a4c-b8e4-b5c89d190951 req-beef358d-4a52-46c3-86b8-0ba52a34311e service nova] Acquired lock "refresh_cache-801f524e-28b5-4452-b880-0fc30d3c5eef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.114679] env[68285]: DEBUG nova.network.neutron [req-de94e27f-2a92-4a4c-b8e4-b5c89d190951 req-beef358d-4a52-46c3-86b8-0ba52a34311e service nova] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Refreshing network info cache for port 1d10105d-1754-49c2-9593-7de22107732e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1207.187428] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892258, 'name': Destroy_Task} progress is 33%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.300233] env[68285]: DEBUG nova.network.neutron [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Successfully updated port: 4ee7857e-7e56-4be9-bc5b-a3963713b734 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1207.322776] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f9eab55b-7fcc-46af-b238-937909861913 tempest-ServerDiagnosticsNegativeTest-1773029254 tempest-ServerDiagnosticsNegativeTest-1773029254-project-member] Lock "9175fd25-a00c-4a2c-b779-56e6541dcaa1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.671s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.447392] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892261, 'name': CreateVM_Task, 'duration_secs': 0.36759} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.448629] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1207.448629] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.448629] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.448938] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1207.449871] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcc00f21-1844-4772-8d2b-a35c7c459d51 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.456318] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1207.456318] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52bf905e-3110-417b-832a-67a241f181c5" [ 1207.456318] env[68285]: _type = "Task" [ 1207.456318] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.469398] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bf905e-3110-417b-832a-67a241f181c5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.636944] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d68a8d-fb3c-4470-9273-d6bd4c7f02de {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.645635] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f7dce6-1034-41a6-a8a7-5cba3198d1bf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.685045] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e441ebda-d316-4330-a3f8-02d5286408d8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.696022] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be72b753-b24d-4ab7-9b61-40128a345206 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.699586] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892258, 'name': Destroy_Task, 'duration_secs': 0.801399} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.699741] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Destroyed the VM [ 1207.699974] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1207.700579] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-812a519c-c0b2-4e81-bedb-b30bf2551e1f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.711356] env[68285]: DEBUG nova.compute.provider_tree [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1207.718072] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1207.718072] env[68285]: value = "task-2892262" [ 1207.718072] env[68285]: _type = "Task" [ 1207.718072] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.729115] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892262, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.802968] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "refresh_cache-feda1a98-3086-43a6-a887-f4d1602ca8ee" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.803136] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "refresh_cache-feda1a98-3086-43a6-a887-f4d1602ca8ee" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.803294] env[68285]: DEBUG nova.network.neutron [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1207.826650] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "5abddda1-9bf7-4039-81c7-8622f43cc72e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.826872] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "5abddda1-9bf7-4039-81c7-8622f43cc72e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.880920] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "fe8e0a71-e9b0-4035-a696-51455d6fc473-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.881733] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "fe8e0a71-e9b0-4035-a696-51455d6fc473-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.881733] env[68285]: DEBUG oslo_concurrency.lockutils [None 
req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "fe8e0a71-e9b0-4035-a696-51455d6fc473-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.890680] env[68285]: DEBUG nova.network.neutron [req-de94e27f-2a92-4a4c-b8e4-b5c89d190951 req-beef358d-4a52-46c3-86b8-0ba52a34311e service nova] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Updated VIF entry in instance network info cache for port 1d10105d-1754-49c2-9593-7de22107732e. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1207.890772] env[68285]: DEBUG nova.network.neutron [req-de94e27f-2a92-4a4c-b8e4-b5c89d190951 req-beef358d-4a52-46c3-86b8-0ba52a34311e service nova] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Updating instance_info_cache with network_info: [{"id": "1d10105d-1754-49c2-9593-7de22107732e", "address": "fa:16:3e:38:96:08", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d10105d-17", "ovs_interfaceid": "1d10105d-1754-49c2-9593-7de22107732e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.937489] env[68285]: DEBUG nova.compute.manager [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1207.938610] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19853dd0-d825-4e87-8993-e6723e56179e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.971110] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bf905e-3110-417b-832a-67a241f181c5, 'name': SearchDatastore_Task, 'duration_secs': 0.028621} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.971110] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.971110] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1207.971110] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.971110] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.971110] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1207.971448] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a519b193-22ca-4160-9314-26e9a42ca5ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.980521] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1207.980750] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1207.981551] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e28fff52-ddf5-465a-b740-49c72a961986 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.987672] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1207.987672] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ff7e8f-197b-e67a-2e16-1566efeb3285" [ 1207.987672] env[68285]: _type = "Task" [ 1207.987672] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.996307] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ff7e8f-197b-e67a-2e16-1566efeb3285, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.215560] env[68285]: DEBUG nova.scheduler.client.report [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1208.234356] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892262, 'name': RemoveSnapshot_Task} progress is 40%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.332029] env[68285]: DEBUG nova.compute.utils [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1208.352694] env[68285]: DEBUG nova.network.neutron [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1208.393673] env[68285]: DEBUG oslo_concurrency.lockutils [req-de94e27f-2a92-4a4c-b8e4-b5c89d190951 req-beef358d-4a52-46c3-86b8-0ba52a34311e service nova] Releasing lock "refresh_cache-801f524e-28b5-4452-b880-0fc30d3c5eef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.451420] env[68285]: INFO nova.compute.manager [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] instance snapshotting [ 1208.454477] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a95ed6b-32bd-47e7-8830-856204d72987 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.492858] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1651438f-6406-41b7-8a6c-4f55a5d9b948 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.501882] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ff7e8f-197b-e67a-2e16-1566efeb3285, 'name': SearchDatastore_Task, 'duration_secs': 0.01063} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.511793] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0af1be35-7131-4cdf-a869-8a7fc2d01479 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.521226] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1208.521226] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521cb163-bbb9-688e-975c-77eda9ff2bae" [ 1208.521226] env[68285]: _type = "Task" [ 1208.521226] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.529469] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521cb163-bbb9-688e-975c-77eda9ff2bae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.553989] env[68285]: DEBUG nova.network.neutron [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Updating instance_info_cache with network_info: [{"id": "4ee7857e-7e56-4be9-bc5b-a3963713b734", "address": "fa:16:3e:eb:06:0e", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ee7857e-7e", "ovs_interfaceid": "4ee7857e-7e56-4be9-bc5b-a3963713b734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.733140] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.943s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.736250] env[68285]: DEBUG oslo_vmware.api [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892262, 'name': RemoveSnapshot_Task, 'duration_secs': 0.844025} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.736801] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.125s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.737034] env[68285]: DEBUG nova.objects.instance [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lazy-loading 'pci_requests' on Instance uuid be47df2a-aee7-4275-9acb-9cf74367f503 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1208.742372] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1208.742648] env[68285]: INFO nova.compute.manager [None req-7ec89614-3834-4b4d-b16f-5b35cd9ee4e8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Took 17.25 seconds to snapshot the instance on the hypervisor. [ 1208.768798] env[68285]: INFO nova.scheduler.client.report [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleted allocations for instance 2eec5d74-b1b8-4714-aaf1-687ec56ad860 [ 1208.834233] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "5abddda1-9bf7-4039-81c7-8622f43cc72e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.877353] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Acquiring lock "49831327-6e13-412e-ab83-bf350e6e9761" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1208.877353] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Lock "49831327-6e13-412e-ab83-bf350e6e9761" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.939456] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.939654] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1208.939843] env[68285]: DEBUG nova.network.neutron [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1209.015363] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1209.016094] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-14f8a63e-f856-497a-89a4-6a24bfbf7c95 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.025756] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1209.025756] env[68285]: value = "task-2892263" [ 1209.025756] env[68285]: _type = "Task" [ 1209.025756] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.033420] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521cb163-bbb9-688e-975c-77eda9ff2bae, 'name': SearchDatastore_Task, 'duration_secs': 0.021023} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.034187] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1209.034507] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 801f524e-28b5-4452-b880-0fc30d3c5eef/801f524e-28b5-4452-b880-0fc30d3c5eef.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1209.034771] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da9e2d48-a691-4c21-9d77-2992d84a3685 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.040220] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892263, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.044682] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1209.044682] env[68285]: value = "task-2892264" [ 1209.044682] env[68285]: _type = "Task" [ 1209.044682] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.054805] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892264, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.056721] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "refresh_cache-feda1a98-3086-43a6-a887-f4d1602ca8ee" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1209.056721] env[68285]: DEBUG nova.compute.manager [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Instance network_info: |[{"id": "4ee7857e-7e56-4be9-bc5b-a3963713b734", "address": "fa:16:3e:eb:06:0e", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ee7857e-7e", "ovs_interfaceid": "4ee7857e-7e56-4be9-bc5b-a3963713b734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1209.057071] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:06:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a15de394-0367-4921-a5c1-6ac8615e3283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ee7857e-7e56-4be9-bc5b-a3963713b734', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1209.064523] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1209.064748] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1209.064971] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-221d5262-0ddb-42ba-8e6c-29ff5f011b4a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.084643] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1209.084643] env[68285]: value = "task-2892265" [ 1209.084643] env[68285]: _type = "Task" [ 1209.084643] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.092301] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892265, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.248283] env[68285]: DEBUG nova.objects.instance [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lazy-loading 'numa_topology' on Instance uuid be47df2a-aee7-4275-9acb-9cf74367f503 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1209.275940] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ba193fdb-2063-4071-9f99-3fbf49b54ac7 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "2eec5d74-b1b8-4714-aaf1-687ec56ad860" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.949s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.379204] env[68285]: DEBUG nova.compute.manager [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1209.502041] env[68285]: DEBUG nova.compute.manager [req-a9337ac2-3bad-4c74-9162-7c886ffd9820 req-e0c14c92-ac6e-473e-9e41-782f740ab44b service nova] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Received event network-vif-plugged-4ee7857e-7e56-4be9-bc5b-a3963713b734 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1209.502285] env[68285]: DEBUG oslo_concurrency.lockutils [req-a9337ac2-3bad-4c74-9162-7c886ffd9820 req-e0c14c92-ac6e-473e-9e41-782f740ab44b service nova] Acquiring lock "feda1a98-3086-43a6-a887-f4d1602ca8ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.503219] env[68285]: DEBUG oslo_concurrency.lockutils [req-a9337ac2-3bad-4c74-9162-7c886ffd9820 req-e0c14c92-ac6e-473e-9e41-782f740ab44b service nova] Lock "feda1a98-3086-43a6-a887-f4d1602ca8ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.503694] env[68285]: DEBUG oslo_concurrency.lockutils [req-a9337ac2-3bad-4c74-9162-7c886ffd9820 req-e0c14c92-ac6e-473e-9e41-782f740ab44b service nova] Lock "feda1a98-3086-43a6-a887-f4d1602ca8ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.503907] env[68285]: DEBUG nova.compute.manager [req-a9337ac2-3bad-4c74-9162-7c886ffd9820 req-e0c14c92-ac6e-473e-9e41-782f740ab44b service nova] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] No waiting events found dispatching network-vif-plugged-4ee7857e-7e56-4be9-bc5b-a3963713b734 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1209.504253] env[68285]: WARNING nova.compute.manager [req-a9337ac2-3bad-4c74-9162-7c886ffd9820 req-e0c14c92-ac6e-473e-9e41-782f740ab44b service nova] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Received unexpected event network-vif-plugged-4ee7857e-7e56-4be9-bc5b-a3963713b734 for instance with vm_state building and task_state spawning. [ 1209.504486] env[68285]: DEBUG nova.compute.manager [req-a9337ac2-3bad-4c74-9162-7c886ffd9820 req-e0c14c92-ac6e-473e-9e41-782f740ab44b service nova] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Received event network-changed-4ee7857e-7e56-4be9-bc5b-a3963713b734 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1209.504656] env[68285]: DEBUG nova.compute.manager [req-a9337ac2-3bad-4c74-9162-7c886ffd9820 req-e0c14c92-ac6e-473e-9e41-782f740ab44b service nova] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Refreshing instance network info cache due to event network-changed-4ee7857e-7e56-4be9-bc5b-a3963713b734. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1209.504929] env[68285]: DEBUG oslo_concurrency.lockutils [req-a9337ac2-3bad-4c74-9162-7c886ffd9820 req-e0c14c92-ac6e-473e-9e41-782f740ab44b service nova] Acquiring lock "refresh_cache-feda1a98-3086-43a6-a887-f4d1602ca8ee" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.508039] env[68285]: DEBUG oslo_concurrency.lockutils [req-a9337ac2-3bad-4c74-9162-7c886ffd9820 req-e0c14c92-ac6e-473e-9e41-782f740ab44b service nova] Acquired lock "refresh_cache-feda1a98-3086-43a6-a887-f4d1602ca8ee" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.508039] env[68285]: DEBUG nova.network.neutron [req-a9337ac2-3bad-4c74-9162-7c886ffd9820 req-e0c14c92-ac6e-473e-9e41-782f740ab44b service nova] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Refreshing network info cache for port 4ee7857e-7e56-4be9-bc5b-a3963713b734 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1209.538376] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892263, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.555676] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892264, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.594398] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892265, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.727019] env[68285]: DEBUG nova.network.neutron [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating instance_info_cache with network_info: [{"id": "9199e860-a70a-4057-93f0-526a4c8a2ed7", "address": "fa:16:3e:6f:50:a1", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9199e860-a7", "ovs_interfaceid": "9199e860-a70a-4057-93f0-526a4c8a2ed7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1209.755813] env[68285]: INFO nova.compute.claims [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1209.906307] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.955768] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "5abddda1-9bf7-4039-81c7-8622f43cc72e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.955768] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "5abddda1-9bf7-4039-81c7-8622f43cc72e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.956096] env[68285]: INFO nova.compute.manager [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 
5abddda1-9bf7-4039-81c7-8622f43cc72e] Attaching volume ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa to /dev/sdb [ 1210.001258] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4b4f17-b0be-4e3b-af61-a3f4878745b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.010597] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb10e76f-4205-49b1-a172-82de9511125c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.029176] env[68285]: DEBUG nova.virt.block_device [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating existing volume attachment record: de72d42c-4d44-4c8b-8dbf-e65d2b5a364c {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1210.039921] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892263, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.056153] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892264, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.648862} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.056415] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 801f524e-28b5-4452-b880-0fc30d3c5eef/801f524e-28b5-4452-b880-0fc30d3c5eef.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1210.056624] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1210.056882] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9e4c2edb-77a6-4eb9-8d56-4e793dce7740 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.063779] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1210.063779] env[68285]: value = "task-2892266" [ 1210.063779] env[68285]: _type = "Task" [ 1210.063779] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.072803] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892266, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.095188] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892265, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.229943] env[68285]: DEBUG oslo_concurrency.lockutils [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1210.275079] env[68285]: DEBUG nova.network.neutron [req-a9337ac2-3bad-4c74-9162-7c886ffd9820 req-e0c14c92-ac6e-473e-9e41-782f740ab44b service nova] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Updated VIF entry in instance network info cache for port 4ee7857e-7e56-4be9-bc5b-a3963713b734. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1210.277142] env[68285]: DEBUG nova.network.neutron [req-a9337ac2-3bad-4c74-9162-7c886ffd9820 req-e0c14c92-ac6e-473e-9e41-782f740ab44b service nova] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Updating instance_info_cache with network_info: [{"id": "4ee7857e-7e56-4be9-bc5b-a3963713b734", "address": "fa:16:3e:eb:06:0e", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ee7857e-7e", "ovs_interfaceid": "4ee7857e-7e56-4be9-bc5b-a3963713b734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.510548] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d734836e-06b2-4a43-b0de-1c508abf6242 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.518303] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94482637-e930-4ed1-806c-0ffe8bc3c562 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1210.560451] env[68285]: DEBUG nova.compute.manager [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1210.561873] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05500984-3e17-4ec1-b26f-ad480fe59eba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.572018] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1469b7-b6d4-4cc1-b3a6-dd0537c8bd30 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.587998] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892263, 'name': CreateSnapshot_Task, 'duration_secs': 1.327311} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.591331] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1210.595680] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc000b9-1450-4ef9-a6c2-ea296ec77c63 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.606357] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c798dc2-c7ea-47ef-bab0-a6026ee43575 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.611464] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892266, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.186188} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.612554] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1210.613716] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790532a8-1515-4470-81a4-f51695c16d82 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.636107] env[68285]: DEBUG nova.compute.provider_tree [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1210.637450] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892265, 'name': CreateVM_Task, 'duration_secs': 1.407811} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.639146] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1210.639999] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.640302] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1210.640930] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1210.653984] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-124ae3a5-174e-43e0-98e6-f2acdfc42284 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.667651] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 
801f524e-28b5-4452-b880-0fc30d3c5eef/801f524e-28b5-4452-b880-0fc30d3c5eef.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1210.668433] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6952c99a-bba9-41c9-aaf3-d9436d56c0e4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.690855] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1210.690855] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524af3f6-f55f-6ae9-5510-f222212ed164" [ 1210.690855] env[68285]: _type = "Task" [ 1210.690855] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.695981] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1210.695981] env[68285]: value = "task-2892270" [ 1210.695981] env[68285]: _type = "Task" [ 1210.695981] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.703241] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524af3f6-f55f-6ae9-5510-f222212ed164, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.708309] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892270, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.755112] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebba2918-e82b-447c-9105-ec571b4d86ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.775426] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9aaf54-3859-4bc7-97de-f5942f7057f0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.779893] env[68285]: DEBUG oslo_concurrency.lockutils [req-a9337ac2-3bad-4c74-9162-7c886ffd9820 req-e0c14c92-ac6e-473e-9e41-782f740ab44b service nova] Releasing lock "refresh_cache-feda1a98-3086-43a6-a887-f4d1602ca8ee" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1210.783690] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "8a649b1e-d007-4032-a46c-b479365e5289" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1210.784074] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "8a649b1e-d007-4032-a46c-b479365e5289" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.788688] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating instance 'fe8e0a71-e9b0-4035-a696-51455d6fc473' progress to 83 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1211.091292] env[68285]: INFO nova.compute.manager [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] instance snapshotting [ 1211.095399] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b75297-9bdc-4b07-9bf4-4b4640677f7e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.115872] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2207a86-3d01-431b-9493-3d95b8fa90ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.144944] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1211.145921] env[68285]: DEBUG 
nova.scheduler.client.report [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1211.148883] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-40e57183-6564-4ac0-9d1a-36de3f95c57e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.164202] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1211.164202] env[68285]: value = "task-2892271" [ 1211.164202] env[68285]: _type = "Task" [ 1211.164202] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.173656] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892271, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.200798] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524af3f6-f55f-6ae9-5510-f222212ed164, 'name': SearchDatastore_Task, 'duration_secs': 0.012335} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.204799] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.204963] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1211.205227] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.205386] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.205570] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1211.206144] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c7db347-04a5-486a-9271-9b6928904b60 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.213034] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892270, 'name': ReconfigVM_Task, 'duration_secs': 0.360835} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.213293] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 801f524e-28b5-4452-b880-0fc30d3c5eef/801f524e-28b5-4452-b880-0fc30d3c5eef.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1211.213948] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b51d91a-ccb4-4338-af29-959e38a87109 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.217146] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1211.217302] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1211.220563] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffbe8b8c-816c-49c5-bae4-4d22ccf1d89b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.221095] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1211.221095] env[68285]: value = "task-2892272" [ 1211.221095] env[68285]: _type = "Task" [ 1211.221095] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.230688] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1211.230688] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527cc21f-ae96-d5f7-a6d5-82ced2353cfa" [ 1211.230688] env[68285]: _type = "Task" [ 1211.230688] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.239590] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892272, 'name': Rename_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.246873] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527cc21f-ae96-d5f7-a6d5-82ced2353cfa, 'name': SearchDatastore_Task, 'duration_secs': 0.018122} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.247776] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-897ff76b-9096-441a-845f-baf6a49474dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.254851] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1211.254851] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52669e67-56b3-edd6-90ff-06541f038277" [ 1211.254851] env[68285]: _type = "Task" [ 1211.254851] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.264340] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52669e67-56b3-edd6-90ff-06541f038277, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.293063] env[68285]: DEBUG nova.compute.manager [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1211.298145] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1211.298713] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34f629cc-6282-484c-89fe-52c81d209110 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.305490] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1211.305490] env[68285]: value = "task-2892273" [ 1211.305490] env[68285]: _type = "Task" [ 1211.305490] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.314726] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892273, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.629433] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1211.629751] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6bb67bb2-2249-4be5-8c33-ba9574968605 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.637817] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1211.637817] env[68285]: value = "task-2892274" [ 1211.637817] env[68285]: _type = "Task" [ 1211.637817] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.647062] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892274, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.659888] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.923s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.665089] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.894s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.665089] env[68285]: DEBUG nova.objects.instance [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lazy-loading 'resources' on Instance uuid 7790f1e6-c73f-40d6-97af-00e9c518a09c {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1211.676012] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892271, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.712536] env[68285]: INFO nova.network.neutron [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Updating port 724df450-925b-47ae-884b-4935b5b95ab2 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1211.731742] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892272, 'name': Rename_Task, 'duration_secs': 0.161291} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.732023] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1211.732280] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ffa234c-34bb-48bb-8c25-cfbac994bed2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.738143] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1211.738143] env[68285]: value = "task-2892275" [ 1211.738143] env[68285]: _type = "Task" [ 1211.738143] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.747292] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892275, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.767856] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52669e67-56b3-edd6-90ff-06541f038277, 'name': SearchDatastore_Task, 'duration_secs': 0.019752} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.767856] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.767856] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] feda1a98-3086-43a6-a887-f4d1602ca8ee/feda1a98-3086-43a6-a887-f4d1602ca8ee.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1211.768171] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c959821-8b36-4601-b37e-ecb1c679583f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.777438] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1211.777438] env[68285]: value = "task-2892276" [ 1211.777438] env[68285]: _type = "Task" [ 1211.777438] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.790056] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892276, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.817036] env[68285]: DEBUG oslo_vmware.api [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892273, 'name': PowerOnVM_Task, 'duration_secs': 0.424196} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.817036] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1211.817036] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-6e76d656-553e-41c9-985c-95867b021e29 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating instance 'fe8e0a71-e9b0-4035-a696-51455d6fc473' progress to 100 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1211.827739] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.150335] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892274, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.179613] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892271, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.250775] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892275, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.290942] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892276, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.532745] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2169c460-4d29-4e70-a843-7b53d457390d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.541995] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3ff921-4dc8-40f9-a89c-b8990132018e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.575130] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f90d9d-cf43-4a9c-b114-3914f047f07c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.581341] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a70185b-cf57-4bc3-a062-44077a7e9b77 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.596193] env[68285]: DEBUG nova.compute.provider_tree [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1212.649730] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892274, 'name': CreateSnapshot_Task, 'duration_secs': 0.657842} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.650040] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1212.650898] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274b375b-4461-44da-9412-02731470934f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.681380] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892271, 'name': CloneVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.750545] env[68285]: DEBUG oslo_vmware.api [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892275, 'name': PowerOnVM_Task, 'duration_secs': 0.769644} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.750941] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1212.751016] env[68285]: INFO nova.compute.manager [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Took 8.22 seconds to spawn the instance on the hypervisor. [ 1212.751445] env[68285]: DEBUG nova.compute.manager [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1212.751994] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c553ad-c1ea-4fef-921f-fca4969fe229 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.785959] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892276, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526982} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.786230] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] feda1a98-3086-43a6-a887-f4d1602ca8ee/feda1a98-3086-43a6-a887-f4d1602ca8ee.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1212.786426] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1212.786669] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4ad641e-d76e-40e5-9316-77e49e003d32 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.793865] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1212.793865] env[68285]: value = "task-2892278" [ 1212.793865] env[68285]: _type = "Task" [ 1212.793865] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.804498] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892278, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.099688] env[68285]: DEBUG nova.scheduler.client.report [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1213.175127] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1213.175504] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1add87e3-22dd-415c-bf3f-ea7608fd83f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.187898] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892271, 'name': CloneVM_Task, 'duration_secs': 1.563755} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.189086] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Created linked-clone VM from snapshot [ 1213.190045] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1213.190045] env[68285]: value = "task-2892279" [ 1213.190045] env[68285]: _type = "Task" [ 1213.190045] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.190727] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc88050-0205-431a-9587-fce545775efc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.200982] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Uploading image a68a033e-8b7f-4166-baaf-710df66ebdaf {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1213.206102] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892279, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.216528] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1213.216806] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c1ce0c0b-b961-4779-9c62-c168b421bfbe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.223429] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1213.223429] env[68285]: value = "task-2892280" [ 1213.223429] env[68285]: _type = "Task" [ 1213.223429] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.232573] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892280, 'name': Destroy_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.238638] env[68285]: DEBUG nova.compute.manager [req-ac44de98-7dbc-4379-8b5b-abc7e6e3e122 req-b2d03577-518c-45ca-ba0d-3efc1b6ad363 service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Received event network-vif-plugged-724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1213.238863] env[68285]: DEBUG oslo_concurrency.lockutils [req-ac44de98-7dbc-4379-8b5b-abc7e6e3e122 req-b2d03577-518c-45ca-ba0d-3efc1b6ad363 service nova] Acquiring lock "be47df2a-aee7-4275-9acb-9cf74367f503-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.239104] env[68285]: DEBUG oslo_concurrency.lockutils [req-ac44de98-7dbc-4379-8b5b-abc7e6e3e122 req-b2d03577-518c-45ca-ba0d-3efc1b6ad363 service nova] Lock "be47df2a-aee7-4275-9acb-9cf74367f503-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.239787] env[68285]: DEBUG oslo_concurrency.lockutils [req-ac44de98-7dbc-4379-8b5b-abc7e6e3e122 req-b2d03577-518c-45ca-ba0d-3efc1b6ad363 service nova] Lock "be47df2a-aee7-4275-9acb-9cf74367f503-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.239787] env[68285]: DEBUG nova.compute.manager [req-ac44de98-7dbc-4379-8b5b-abc7e6e3e122 req-b2d03577-518c-45ca-ba0d-3efc1b6ad363 service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] No waiting events found dispatching network-vif-plugged-724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1213.239787] env[68285]: WARNING nova.compute.manager [req-ac44de98-7dbc-4379-8b5b-abc7e6e3e122 req-b2d03577-518c-45ca-ba0d-3efc1b6ad363 service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Received unexpected event network-vif-plugged-724df450-925b-47ae-884b-4935b5b95ab2 for instance with vm_state shelved_offloaded and task_state spawning. [ 1213.272103] env[68285]: INFO nova.compute.manager [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Took 20.98 seconds to build instance. 
[ 1213.295155] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.295366] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquired lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1213.295546] env[68285]: DEBUG nova.network.neutron [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1213.312433] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892278, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.236761} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.312740] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1213.313835] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939d40a9-efbf-441c-85f7-9dcf302fec90 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.354708] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] feda1a98-3086-43a6-a887-f4d1602ca8ee/feda1a98-3086-43a6-a887-f4d1602ca8ee.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1213.357091] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8277efe-d230-46e3-a615-0d573c9c3c7e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.385329] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1213.385329] env[68285]: value = "task-2892281" [ 1213.385329] env[68285]: _type = "Task" [ 1213.385329] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.394383] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892281, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.604998] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.940s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.607924] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.986s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.608221] env[68285]: DEBUG nova.objects.instance [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lazy-loading 'resources' on Instance uuid 9c190abd-23ee-4e8e-8b91-9050847581d5 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1213.628387] env[68285]: INFO nova.scheduler.client.report [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Deleted allocations for instance 7790f1e6-c73f-40d6-97af-00e9c518a09c [ 1213.705061] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892279, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.735329] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892280, 'name': Destroy_Task} progress is 33%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.774120] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3eb73bfc-54a3-463f-97a9-196ed3f0a978 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "801f524e-28b5-4452-b880-0fc30d3c5eef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.494s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.898717] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892281, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.910699] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.910699] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.943045] env[68285]: DEBUG oslo_concurrency.lockutils [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "fe8e0a71-e9b0-4035-a696-51455d6fc473" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.944106] env[68285]: DEBUG oslo_concurrency.lockutils [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "fe8e0a71-e9b0-4035-a696-51455d6fc473" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.945100] env[68285]: DEBUG nova.compute.manager [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Going to confirm migration 5 {{(pid=68285) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1214.021511] env[68285]: DEBUG nova.network.neutron [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Updating instance_info_cache with network_info: [{"id": "724df450-925b-47ae-884b-4935b5b95ab2", "address": "fa:16:3e:0f:59:8c", "network": {"id": "19fe9f45-cb71-4a4f-8a94-0020f8d0e8a7", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-693820438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fb202eb50a74c558edb6fdb9dfaf077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap724df450-92", "ovs_interfaceid": "724df450-925b-47ae-884b-4935b5b95ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.139315] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4d8c717d-8407-4721-b72e-1bbfc1685508 tempest-AttachVolumeTestJSON-2064960774 tempest-AttachVolumeTestJSON-2064960774-project-member] Lock "7790f1e6-c73f-40d6-97af-00e9c518a09c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.507s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.203959] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892279, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.233938] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892280, 'name': Destroy_Task, 'duration_secs': 0.611323} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.234216] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Destroyed the VM [ 1214.234451] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1214.234700] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-47a76842-a9e0-40e9-b71e-aac832a3f318 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.241888] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1214.241888] env[68285]: value = "task-2892282" [ 1214.241888] env[68285]: _type = "Task" [ 1214.241888] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.253844] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892282, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.386311] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085007da-c867-480f-8255-a8115b377d6d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.402017] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059f63ba-dda9-4860-9cf7-41a31852eff9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.406127] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892281, 'name': ReconfigVM_Task, 'duration_secs': 0.55954} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.406127] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Reconfigured VM instance instance-00000063 to attach disk [datastore2] feda1a98-3086-43a6-a887-f4d1602ca8ee/feda1a98-3086-43a6-a887-f4d1602ca8ee.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1214.406836] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb7e67c0-2f4e-4df7-892f-81c8520ce72f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.438061] env[68285]: DEBUG nova.compute.manager [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1214.443099] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c77e75c-d516-400a-a919-dac240ec5523 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.447613] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1214.447613] env[68285]: value = "task-2892284" [ 1214.447613] env[68285]: _type = "Task" [ 1214.447613] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.459415] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3542cc98-fa80-4330-b6ef-872a562a5de9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.470051] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892284, 'name': Rename_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.480442] env[68285]: DEBUG nova.compute.provider_tree [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1214.524576] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Releasing lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1214.557782] env[68285]: DEBUG oslo_concurrency.lockutils [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.558080] env[68285]: DEBUG oslo_concurrency.lockutils [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1214.558361] env[68285]: DEBUG nova.network.neutron [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1214.558685] env[68285]: DEBUG nova.objects.instance [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lazy-loading 'info_cache' on Instance uuid fe8e0a71-e9b0-4035-a696-51455d6fc473 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1214.563150] env[68285]: DEBUG nova.virt.hardware [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='ac082b4e8b7f093286a63c72461f621b',container_format='bare',created_at=2025-03-10T15:57:21Z,direct_url=,disk_format='vmdk',id=ba8823bf-179d-43d4-8712-d66dd79f84da,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-537650709-shelved',owner='2fb202eb50a74c558edb6fdb9dfaf077',properties=ImageMetaProps,protected=,size=31666176,status='active',tags=,updated_at=2025-03-10T15:57:37Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1214.563445] env[68285]: DEBUG nova.virt.hardware [None 
req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1214.563683] env[68285]: DEBUG nova.virt.hardware [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1214.563980] env[68285]: DEBUG nova.virt.hardware [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1214.564200] env[68285]: DEBUG nova.virt.hardware [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1214.564848] env[68285]: DEBUG nova.virt.hardware [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1214.564848] env[68285]: DEBUG nova.virt.hardware [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1214.564848] env[68285]: DEBUG nova.virt.hardware [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1214.564955] env[68285]: DEBUG nova.virt.hardware [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1214.565126] env[68285]: DEBUG nova.virt.hardware [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1214.566027] env[68285]: DEBUG nova.virt.hardware [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1214.566337] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be3b0249-96c2-45bd-9ca9-041787320f83 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.575356] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c37e86-7ec9-4eff-bb17-1f7041a5e801 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.590773] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:59:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '724df450-925b-47ae-884b-4935b5b95ab2', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1214.598347] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1214.598886] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1214.599142] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71c4f86c-7b41-4d2b-b2e0-54b5bf2beeed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.618370] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1214.618370] env[68285]: value = "task-2892285" [ 1214.618370] env[68285]: _type = "Task" [ 1214.618370] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.626218] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892285, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.708930] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892279, 'name': CloneVM_Task, 'duration_secs': 1.351952} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.709367] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Created linked-clone VM from snapshot [ 1214.710245] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f615167-0499-4a7c-a1da-7d8f3bc5cfc9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.720521] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Uploading image dc4540fc-09c1-4961-a8fb-336d7b5f5266 {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1214.742069] env[68285]: DEBUG oslo_vmware.rw_handles [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1214.742069] env[68285]: value = "vm-581049" [ 1214.742069] env[68285]: _type = "VirtualMachine" [ 1214.742069] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1214.742424] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9c7d61eb-1bbb-40b9-bbee-b3a3f4e5dbe0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.752752] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892282, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.754014] env[68285]: DEBUG oslo_vmware.rw_handles [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lease: (returnval){ [ 1214.754014] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ad3523-8e2b-6851-f921-fd8efdfa076f" [ 1214.754014] env[68285]: _type = "HttpNfcLease" [ 1214.754014] env[68285]: } obtained for exporting VM: (result){ [ 1214.754014] env[68285]: value = "vm-581049" [ 1214.754014] env[68285]: _type = "VirtualMachine" [ 1214.754014] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1214.754387] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the lease: (returnval){ [ 1214.754387] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ad3523-8e2b-6851-f921-fd8efdfa076f" [ 1214.754387] env[68285]: _type = "HttpNfcLease" [ 1214.754387] env[68285]: } to be ready. 
{{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1214.760232] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1214.760232] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ad3523-8e2b-6851-f921-fd8efdfa076f" [ 1214.760232] env[68285]: _type = "HttpNfcLease" [ 1214.760232] env[68285]: } is initializing. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1214.958860] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892284, 'name': Rename_Task, 'duration_secs': 0.164854} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.960051] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.960252] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1214.960490] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ef3fd00-5e7e-4846-bcf4-f9d0bcc49f03 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.970223] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1214.970223] env[68285]: value = "task-2892287" [ 1214.970223] env[68285]: _type = "Task" [ 1214.970223] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.981296] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892287, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.984336] env[68285]: DEBUG nova.scheduler.client.report [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1215.084039] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Volume attach. Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1215.084039] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581046', 'volume_id': 'ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa', 'name': 'volume-ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5abddda1-9bf7-4039-81c7-8622f43cc72e', 'attached_at': '', 'detached_at': '', 'volume_id': 'ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa', 'serial': 'ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1215.085306] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7015d609-98a7-4fea-82c1-e1316a913e36 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.103785] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9eaae3-2cd0-42cf-9118-ef41c903711f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.130860] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] volume-ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa/volume-ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1215.134421] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cee2d6e1-1380-448b-ac53-a88fb647c762 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.153331] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892285, 'name': 
CreateVM_Task, 'duration_secs': 0.514016} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.154513] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1215.154860] env[68285]: DEBUG oslo_vmware.api [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1215.154860] env[68285]: value = "task-2892288" [ 1215.154860] env[68285]: _type = "Task" [ 1215.154860] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.155495] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ba8823bf-179d-43d4-8712-d66dd79f84da" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.155659] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ba8823bf-179d-43d4-8712-d66dd79f84da" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1215.156036] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ba8823bf-179d-43d4-8712-d66dd79f84da" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1215.156353] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6cc3ee0-0883-44c9-b95e-fe26bb68dca8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.164635] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1215.164635] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]522043b6-51cf-8def-738b-de87bd21aa50" [ 1215.164635] env[68285]: _type = "Task" [ 1215.164635] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.167992] env[68285]: DEBUG oslo_vmware.api [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892288, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.176187] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522043b6-51cf-8def-738b-de87bd21aa50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.254941] env[68285]: DEBUG oslo_vmware.api [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892282, 'name': RemoveSnapshot_Task, 'duration_secs': 0.903372} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.257998] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1215.264497] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1215.264497] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ad3523-8e2b-6851-f921-fd8efdfa076f" [ 1215.264497] env[68285]: _type = "HttpNfcLease" [ 1215.264497] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1215.265850] env[68285]: DEBUG oslo_vmware.rw_handles [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1215.265850] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ad3523-8e2b-6851-f921-fd8efdfa076f" [ 1215.265850] env[68285]: _type = "HttpNfcLease" [ 1215.265850] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1215.266753] env[68285]: DEBUG nova.compute.manager [req-fa90bc95-ece8-49f2-a3d9-744741330106 req-97006789-efe0-4ace-876f-3835ba71818f service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Received event network-changed-724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1215.267188] env[68285]: DEBUG nova.compute.manager [req-fa90bc95-ece8-49f2-a3d9-744741330106 req-97006789-efe0-4ace-876f-3835ba71818f service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Refreshing instance network info cache due to event network-changed-724df450-925b-47ae-884b-4935b5b95ab2. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1215.267188] env[68285]: DEBUG oslo_concurrency.lockutils [req-fa90bc95-ece8-49f2-a3d9-744741330106 req-97006789-efe0-4ace-876f-3835ba71818f service nova] Acquiring lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.267496] env[68285]: DEBUG oslo_concurrency.lockutils [req-fa90bc95-ece8-49f2-a3d9-744741330106 req-97006789-efe0-4ace-876f-3835ba71818f service nova] Acquired lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1215.267496] env[68285]: DEBUG nova.network.neutron [req-fa90bc95-ece8-49f2-a3d9-744741330106 req-97006789-efe0-4ace-876f-3835ba71818f service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Refreshing network info cache for port 724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1215.269193] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b945cd8-efbc-44c2-8f23-019127aff6ed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.277492] env[68285]: DEBUG oslo_vmware.rw_handles [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5203860a-ffd6-92d1-f70e-05d20ce486f4/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1215.277666] env[68285]: DEBUG oslo_vmware.rw_handles [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5203860a-ffd6-92d1-f70e-05d20ce486f4/disk-0.vmdk for reading. {{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1215.413272] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-20b852eb-76c0-42c6-bd27-eea79767a621 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.480858] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892287, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.488752] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.881s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.491101] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.561s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.491297] env[68285]: DEBUG nova.objects.instance [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lazy-loading 'resources' on Instance uuid fe6c495f-6917-4e3d-acce-7487a45e3ef4 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1215.510166] env[68285]: INFO nova.scheduler.client.report [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleted allocations for instance 9c190abd-23ee-4e8e-8b91-9050847581d5 [ 1215.666520] env[68285]: DEBUG oslo_vmware.api [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892288, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.679066] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ba8823bf-179d-43d4-8712-d66dd79f84da" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1215.679319] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Processing image ba8823bf-179d-43d4-8712-d66dd79f84da {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1215.679557] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ba8823bf-179d-43d4-8712-d66dd79f84da/ba8823bf-179d-43d4-8712-d66dd79f84da.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.679704] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ba8823bf-179d-43d4-8712-d66dd79f84da/ba8823bf-179d-43d4-8712-d66dd79f84da.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1215.679889] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1215.680280] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a84a193-81cc-4e88-b424-5610e06a29f3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.690095] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1215.690326] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1215.693951] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df506424-cbe0-4f55-9021-a0a1bb5c95c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.700049] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1215.700049] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]528ca678-323b-95d3-c82b-3d56d86442e6" [ 1215.700049] env[68285]: _type = "Task" [ 1215.700049] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.708384] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528ca678-323b-95d3-c82b-3d56d86442e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.763484] env[68285]: WARNING nova.compute.manager [None req-5b5f6d0a-4b0f-47b4-8f28-694ff84c2899 tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Image not found during snapshot: nova.exception.ImageNotFound: Image a68a033e-8b7f-4166-baaf-710df66ebdaf could not be found. [ 1215.839388] env[68285]: DEBUG nova.network.neutron [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating instance_info_cache with network_info: [{"id": "9199e860-a70a-4057-93f0-526a4c8a2ed7", "address": "fa:16:3e:6f:50:a1", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9199e860-a7", "ovs_interfaceid": "9199e860-a70a-4057-93f0-526a4c8a2ed7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.981988] env[68285]: DEBUG oslo_vmware.api [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892287, 'name': PowerOnVM_Task, 
'duration_secs': 0.813039} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.986025] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1215.986290] env[68285]: INFO nova.compute.manager [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Took 9.06 seconds to spawn the instance on the hypervisor. [ 1215.986662] env[68285]: DEBUG nova.compute.manager [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1215.987953] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fc5d0d-d097-4c11-b9ff-df91db70c1ca {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.026899] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06329978-f4a8-4058-8554-6f57ad1d3ff6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "9c190abd-23ee-4e8e-8b91-9050847581d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.226s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.099090] env[68285]: DEBUG nova.network.neutron [req-fa90bc95-ece8-49f2-a3d9-744741330106 req-97006789-efe0-4ace-876f-3835ba71818f service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Updated VIF entry in instance network info cache for port 724df450-925b-47ae-884b-4935b5b95ab2. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1216.099505] env[68285]: DEBUG nova.network.neutron [req-fa90bc95-ece8-49f2-a3d9-744741330106 req-97006789-efe0-4ace-876f-3835ba71818f service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Updating instance_info_cache with network_info: [{"id": "724df450-925b-47ae-884b-4935b5b95ab2", "address": "fa:16:3e:0f:59:8c", "network": {"id": "19fe9f45-cb71-4a4f-8a94-0020f8d0e8a7", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-693820438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fb202eb50a74c558edb6fdb9dfaf077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap724df450-92", "ovs_interfaceid": "724df450-925b-47ae-884b-4935b5b95ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.172200] env[68285]: DEBUG oslo_vmware.api [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892288, 'name': ReconfigVM_Task, 'duration_secs': 0.709062} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.172602] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Reconfigured VM instance instance-00000059 to attach disk [datastore2] volume-ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa/volume-ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1216.180772] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a74a955-5219-4e94-9bf8-cbddeecd365c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.198802] env[68285]: DEBUG oslo_vmware.api [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1216.198802] env[68285]: value = "task-2892289" [ 1216.198802] env[68285]: _type = "Task" [ 1216.198802] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.213675] env[68285]: DEBUG oslo_vmware.api [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892289, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.219179] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Preparing fetch location {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1216.219449] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Fetch image to [datastore2] OSTACK_IMG_3bf3f3be-9190-41b2-9d9d-d5f1bd653018/OSTACK_IMG_3bf3f3be-9190-41b2-9d9d-d5f1bd653018.vmdk {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1216.219722] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Downloading stream optimized image ba8823bf-179d-43d4-8712-d66dd79f84da to [datastore2] OSTACK_IMG_3bf3f3be-9190-41b2-9d9d-d5f1bd653018/OSTACK_IMG_3bf3f3be-9190-41b2-9d9d-d5f1bd653018.vmdk on the data store datastore2 as vApp {{(pid=68285) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1216.219901] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Downloading image file data ba8823bf-179d-43d4-8712-d66dd79f84da to the ESX as VM named 'OSTACK_IMG_3bf3f3be-9190-41b2-9d9d-d5f1bd653018' {{(pid=68285) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1216.297316] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa5223c-ed2d-42c6-af7f-67b536c3011f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.306970] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b1026f-4ca5-437a-b4fe-7fa06b7c53b7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.341720] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1216.341720] env[68285]: value = "resgroup-9" [ 1216.341720] env[68285]: _type = "ResourcePool" [ 1216.341720] env[68285]: }. 
{{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1216.343058] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac727c2-97c0-4a16-ab04-aa4a985e7afb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.346725] env[68285]: DEBUG oslo_concurrency.lockutils [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "refresh_cache-fe8e0a71-e9b0-4035-a696-51455d6fc473" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1216.347131] env[68285]: DEBUG nova.objects.instance [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lazy-loading 'migration_context' on Instance uuid fe8e0a71-e9b0-4035-a696-51455d6fc473 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1216.348484] env[68285]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-ba341f46-1ebd-4c7b-895d-44ba3c335f55 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.365793] env[68285]: DEBUG nova.objects.base [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1216.367162] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c1a03a-2baa-4d49-9dd3-22839ac5a278 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.393215] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14cf3d0a-ff21-4623-96a7-dccbfd15e982 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.397138] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lease: (returnval){ [ 1216.397138] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5229760d-50d3-40ef-ec90-bcc929714264" [ 1216.397138] env[68285]: _type = "HttpNfcLease" [ 1216.397138] env[68285]: } obtained for vApp import into resource pool (val){ [ 1216.397138] env[68285]: value = "resgroup-9" [ 1216.397138] env[68285]: _type = "ResourcePool" [ 1216.397138] env[68285]: }. {{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1216.397330] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the lease: (returnval){ [ 1216.397330] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5229760d-50d3-40ef-ec90-bcc929714264" [ 1216.397330] env[68285]: _type = "HttpNfcLease" [ 1216.397330] env[68285]: } to be ready. 
{{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1216.398665] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96afb944-6bb1-4302-90a2-d99574b3cf21 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.414429] env[68285]: DEBUG nova.compute.provider_tree [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1216.419063] env[68285]: DEBUG oslo_vmware.api [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1216.419063] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]528d1afb-4220-e30f-fcfc-f92fcce0b409" [ 1216.419063] env[68285]: _type = "Task" [ 1216.419063] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.422879] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1216.422879] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5229760d-50d3-40ef-ec90-bcc929714264" [ 1216.422879] env[68285]: _type = "HttpNfcLease" [ 1216.422879] env[68285]: } is initializing. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1216.429496] env[68285]: DEBUG oslo_vmware.api [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528d1afb-4220-e30f-fcfc-f92fcce0b409, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.509842] env[68285]: INFO nova.compute.manager [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Took 23.75 seconds to build instance. 
[ 1216.602548] env[68285]: DEBUG oslo_concurrency.lockutils [req-fa90bc95-ece8-49f2-a3d9-744741330106 req-97006789-efe0-4ace-876f-3835ba71818f service nova] Releasing lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1216.653492] env[68285]: DEBUG oslo_concurrency.lockutils [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "ef87ff30-ef45-4abb-8696-d5493572703a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.653889] env[68285]: DEBUG oslo_concurrency.lockutils [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "ef87ff30-ef45-4abb-8696-d5493572703a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.654156] env[68285]: DEBUG oslo_concurrency.lockutils [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "ef87ff30-ef45-4abb-8696-d5493572703a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.654452] env[68285]: DEBUG oslo_concurrency.lockutils [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "ef87ff30-ef45-4abb-8696-d5493572703a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.654810] env[68285]: DEBUG oslo_concurrency.lockutils [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "ef87ff30-ef45-4abb-8696-d5493572703a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.657069] env[68285]: INFO nova.compute.manager [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Terminating instance [ 1216.709434] env[68285]: DEBUG oslo_vmware.api [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892289, 'name': ReconfigVM_Task, 'duration_secs': 0.197918} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.709804] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581046', 'volume_id': 'ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa', 'name': 'volume-ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5abddda1-9bf7-4039-81c7-8622f43cc72e', 'attached_at': '', 'detached_at': '', 'volume_id': 'ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa', 'serial': 'ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1216.907320] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1216.907320] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5229760d-50d3-40ef-ec90-bcc929714264" [ 1216.907320] env[68285]: _type = "HttpNfcLease" [ 1216.907320] env[68285]: } is initializing. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1216.919530] env[68285]: DEBUG nova.scheduler.client.report [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1216.933225] env[68285]: DEBUG oslo_vmware.api [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528d1afb-4220-e30f-fcfc-f92fcce0b409, 'name': SearchDatastore_Task, 'duration_secs': 0.0169} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.933794] env[68285]: DEBUG oslo_concurrency.lockutils [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.012469] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4b4045c6-bcf0-4519-bc44-79e019bd1c85 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "feda1a98-3086-43a6-a887-f4d1602ca8ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.261s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.161467] env[68285]: DEBUG nova.compute.manager [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1217.162168] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1217.162989] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19d4faa-e81f-4728-bfee-1ad1137fc42d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.170944] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1217.172131] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2941992f-1869-4ce2-aa13-32338aab5ed6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.178028] env[68285]: DEBUG oslo_vmware.api [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1217.178028] env[68285]: value = "task-2892291" [ 1217.178028] env[68285]: _type = "Task" [ 1217.178028] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.186891] env[68285]: DEBUG oslo_vmware.api [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892291, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.411613] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1217.411613] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5229760d-50d3-40ef-ec90-bcc929714264" [ 1217.411613] env[68285]: _type = "HttpNfcLease" [ 1217.411613] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1217.412533] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1217.412533] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5229760d-50d3-40ef-ec90-bcc929714264" [ 1217.412533] env[68285]: _type = "HttpNfcLease" [ 1217.412533] env[68285]: }. {{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1217.413196] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb5c572-9bfc-4cd7-bae0-fd8713d2f1d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.421340] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b5534-7444-37fe-4c63-5636b3ddebde/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1217.421555] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Creating HTTP connection to write to file with size = 31666176 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b5534-7444-37fe-4c63-5636b3ddebde/disk-0.vmdk. 
{{(pid=68285) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1217.424740] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.934s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.480715] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.574s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.482438] env[68285]: INFO nova.compute.claims [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1217.487450] env[68285]: INFO nova.scheduler.client.report [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleted allocations for instance fe6c495f-6917-4e3d-acce-7487a45e3ef4 [ 1217.495393] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fc939ba2-af37-461b-af60-62eddb00e293 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.688786] env[68285]: DEBUG oslo_vmware.api [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892291, 'name': PowerOffVM_Task, 'duration_secs': 0.241217} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.691378] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1217.691378] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1217.691378] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-576a1561-316e-4f48-b492-4d9609f0dda2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.752116] env[68285]: DEBUG nova.objects.instance [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'flavor' on Instance uuid 5abddda1-9bf7-4039-81c7-8622f43cc72e {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1217.755572] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1217.755928] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1217.756301] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleting the datastore file [datastore1] ef87ff30-ef45-4abb-8696-d5493572703a {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1217.757025] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71354212-bc06-4d1a-b6b9-e7282703b3a1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.763376] env[68285]: DEBUG oslo_vmware.api [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for the task: (returnval){ [ 1217.763376] env[68285]: value = "task-2892294" [ 1217.763376] env[68285]: _type = "Task" [ 1217.763376] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.772413] env[68285]: DEBUG oslo_vmware.api [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892294, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.998992] env[68285]: DEBUG oslo_concurrency.lockutils [None req-28360890-5988-4314-95b1-94c52c818eb4 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "fe6c495f-6917-4e3d-acce-7487a45e3ef4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.699s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.258477] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44bd516d-5e85-4073-a879-07dccf386a44 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "5abddda1-9bf7-4039-81c7-8622f43cc72e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.302s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.275248] env[68285]: DEBUG oslo_vmware.api [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Task: {'id': task-2892294, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18581} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.275509] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1218.275718] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1218.275912] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1218.276092] env[68285]: INFO nova.compute.manager [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1218.276350] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1218.276541] env[68285]: DEBUG nova.compute.manager [-] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1218.276649] env[68285]: DEBUG nova.network.neutron [-] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1218.808672] env[68285]: INFO nova.compute.manager [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Rescuing [ 1218.808949] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "refresh_cache-feda1a98-3086-43a6-a887-f4d1602ca8ee" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.809178] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "refresh_cache-feda1a98-3086-43a6-a887-f4d1602ca8ee" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1218.809362] env[68285]: DEBUG nova.network.neutron [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1218.822083] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180e07ca-f4e9-405c-8681-c35a2fceaeb9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.834782] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0713f06f-9675-4079-97af-cdb3fc04323f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.875982] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae152d92-53e7-467c-84b4-58b8bbcd15b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.884926] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eaaeff5-1883-4bf9-966e-a9c5c5b7e07a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.904324] env[68285]: DEBUG nova.compute.provider_tree [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1218.907117] env[68285]: DEBUG oslo_vmware.rw_handles [None 
req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Completed reading data from the image iterator. {{(pid=68285) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1218.907324] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b5534-7444-37fe-4c63-5636b3ddebde/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1218.908236] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ee0517-c350-4489-aca0-a53715a6ae3c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.915857] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b5534-7444-37fe-4c63-5636b3ddebde/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1218.915857] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b5534-7444-37fe-4c63-5636b3ddebde/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1218.916084] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-dd100394-7e9d-4065-91eb-21c4b9b99edb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.132860] env[68285]: DEBUG nova.compute.manager [req-e892dd75-2eb2-4b97-8233-ae4d8c8ba5ac req-1c96e696-571f-49b1-a091-a169e48f446b service nova] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Received event network-vif-deleted-0399b6c2-519b-440a-a775-e9fb28623777 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1219.133088] env[68285]: INFO nova.compute.manager [req-e892dd75-2eb2-4b97-8233-ae4d8c8ba5ac req-1c96e696-571f-49b1-a091-a169e48f446b service nova] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Neutron deleted interface 0399b6c2-519b-440a-a775-e9fb28623777; detaching it from the instance and deleting it from the info cache [ 1219.133270] env[68285]: DEBUG nova.network.neutron [req-e892dd75-2eb2-4b97-8233-ae4d8c8ba5ac req-1c96e696-571f-49b1-a091-a169e48f446b service nova] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.268408] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b5534-7444-37fe-4c63-5636b3ddebde/disk-0.vmdk. 
{{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1219.268729] env[68285]: INFO nova.virt.vmwareapi.images [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Downloaded image file data ba8823bf-179d-43d4-8712-d66dd79f84da [ 1219.269557] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895a9b9e-ecd6-437f-a48d-87e9e3dde0f6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.292831] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc26f499-3609-4c03-a674-038a8b86269a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.324577] env[68285]: INFO nova.virt.vmwareapi.images [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] The imported VM was unregistered [ 1219.326570] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Caching image {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1219.326808] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Creating directory with path [datastore2] devstack-image-cache_base/ba8823bf-179d-43d4-8712-d66dd79f84da {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1219.327097] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57ecc22a-ced4-4248-a9db-d96e37f2f445 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.339939] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Created directory with path [datastore2] devstack-image-cache_base/ba8823bf-179d-43d4-8712-d66dd79f84da {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1219.339939] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_3bf3f3be-9190-41b2-9d9d-d5f1bd653018/OSTACK_IMG_3bf3f3be-9190-41b2-9d9d-d5f1bd653018.vmdk to [datastore2] devstack-image-cache_base/ba8823bf-179d-43d4-8712-d66dd79f84da/ba8823bf-179d-43d4-8712-d66dd79f84da.vmdk. 
{{(pid=68285) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1219.340179] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-ff6c4f60-4e0e-4fd7-b02c-dfcf00977ee7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.346555] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1219.346555] env[68285]: value = "task-2892296" [ 1219.346555] env[68285]: _type = "Task" [ 1219.346555] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.356353] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892296, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.412932] env[68285]: DEBUG nova.scheduler.client.report [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1219.416311] env[68285]: DEBUG nova.network.neutron [-] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.580862] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "b2199b56-64bd-4096-b877-e10656b09313" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1219.581379] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "b2199b56-64bd-4096-b877-e10656b09313" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.643033] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3a8746a-32f4-4c8c-9db8-48437fc87a40 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.656688] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a1f9677f-ae50-4b85-8d7d-d24be1e0fc97 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.694874] env[68285]: DEBUG nova.compute.manager [req-e892dd75-2eb2-4b97-8233-ae4d8c8ba5ac req-1c96e696-571f-49b1-a091-a169e48f446b service nova] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Detach interface failed, port_id=0399b6c2-519b-440a-a775-e9fb28623777, reason: Instance ef87ff30-ef45-4abb-8696-d5493572703a could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1219.744721] env[68285]: DEBUG nova.network.neutron [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Updating instance_info_cache with network_info: [{"id": "4ee7857e-7e56-4be9-bc5b-a3963713b734", "address": "fa:16:3e:eb:06:0e", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ee7857e-7e", "ovs_interfaceid": "4ee7857e-7e56-4be9-bc5b-a3963713b734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.856540] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892296, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.919324] env[68285]: INFO nova.compute.manager [-] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Took 1.64 seconds to deallocate network for instance. [ 1219.920334] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.439s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.920568] env[68285]: DEBUG nova.compute.manager [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1219.929532] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.102s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.931106] env[68285]: INFO nova.compute.claims [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1220.084780] env[68285]: DEBUG nova.compute.manager [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1220.248152] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "refresh_cache-feda1a98-3086-43a6-a887-f4d1602ca8ee" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1220.318656] env[68285]: DEBUG nova.compute.manager [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Stashing vm_state: active {{(pid=68285) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1220.357603] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892296, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.389832] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "d4818c98-8134-4426-bd35-b2339ed6abd4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.390142] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "d4818c98-8134-4426-bd35-b2339ed6abd4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.426292] env[68285]: DEBUG nova.compute.utils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1220.427667] env[68285]: DEBUG nova.compute.manager [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1220.427827] env[68285]: DEBUG nova.network.neutron [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1220.430493] env[68285]: DEBUG oslo_concurrency.lockutils [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.484710] env[68285]: DEBUG nova.policy [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '48d81ce2f85a488aae77c0a036d8d38d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b7131f91e9134b6f89ee5a4ef709ea3b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1220.614434] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.841163] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.859542] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892296, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.883027] env[68285]: DEBUG nova.network.neutron [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Successfully created port: d139a9ee-edc0-441e-91e6-f3252990f954 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1220.892725] env[68285]: DEBUG nova.compute.manager [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1220.932474] env[68285]: DEBUG nova.compute.manager [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1221.254585] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb6a317-eb39-427c-b012-739500a79a68 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.263905] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd79a77-8f62-43d3-91c6-df9bbabb4ba5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.299536] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf016dc-4de1-4e6e-93c5-b59d3267cd3a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.314267] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e24858e-f476-416b-9f3b-b891195085bf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.331457] env[68285]: DEBUG nova.compute.provider_tree [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1221.360290] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892296, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.418804] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.808278] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.808278] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de531620-9d8c-4570-a614-04e303a5b63b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.816980] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1221.816980] env[68285]: value = "task-2892298" [ 1221.816980] env[68285]: _type = "Task" [ 1221.816980] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.826458] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892298, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.837674] env[68285]: DEBUG nova.scheduler.client.report [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1221.861958] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892296, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.943576] env[68285]: DEBUG nova.compute.manager [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1221.969651] env[68285]: DEBUG nova.virt.hardware [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1221.969985] env[68285]: DEBUG nova.virt.hardware [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1221.970167] env[68285]: DEBUG nova.virt.hardware [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1221.970355] env[68285]: DEBUG nova.virt.hardware [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1221.970504] env[68285]: DEBUG nova.virt.hardware [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1221.970651] env[68285]: DEBUG nova.virt.hardware [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1221.970860] env[68285]: DEBUG nova.virt.hardware [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1221.971033] env[68285]: DEBUG nova.virt.hardware [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1221.971230] env[68285]: DEBUG 
nova.virt.hardware [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1221.971410] env[68285]: DEBUG nova.virt.hardware [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1221.971587] env[68285]: DEBUG nova.virt.hardware [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1221.972629] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e153451-a005-43cf-947f-1d93a53f18da {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.981057] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99726ff-8057-4fc0-882b-016ef545de42 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.327530] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892298, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.345153] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.414s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.345153] env[68285]: DEBUG nova.compute.manager [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1222.348212] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.387s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.348382] env[68285]: INFO nova.compute.claims [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1222.360393] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892296, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.700374} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.360660] env[68285]: INFO nova.virt.vmwareapi.ds_util [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_3bf3f3be-9190-41b2-9d9d-d5f1bd653018/OSTACK_IMG_3bf3f3be-9190-41b2-9d9d-d5f1bd653018.vmdk to [datastore2] devstack-image-cache_base/ba8823bf-179d-43d4-8712-d66dd79f84da/ba8823bf-179d-43d4-8712-d66dd79f84da.vmdk. [ 1222.360849] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Cleaning up location [datastore2] OSTACK_IMG_3bf3f3be-9190-41b2-9d9d-d5f1bd653018 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1222.361027] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_3bf3f3be-9190-41b2-9d9d-d5f1bd653018 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.361321] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d875f9d9-bab1-4837-a65a-15efa026a0e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.370534] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1222.370534] env[68285]: value = "task-2892299" [ 1222.370534] env[68285]: _type = "Task" [ 1222.370534] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.378219] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892299, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.716635] env[68285]: DEBUG nova.compute.manager [req-0b622297-decf-4296-9497-9b9c1065c26f req-3e7ce013-2be9-4cb7-8630-1939931dd2f5 service nova] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Received event network-vif-plugged-d139a9ee-edc0-441e-91e6-f3252990f954 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1222.716855] env[68285]: DEBUG oslo_concurrency.lockutils [req-0b622297-decf-4296-9497-9b9c1065c26f req-3e7ce013-2be9-4cb7-8630-1939931dd2f5 service nova] Acquiring lock "49831327-6e13-412e-ab83-bf350e6e9761-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.717076] env[68285]: DEBUG oslo_concurrency.lockutils [req-0b622297-decf-4296-9497-9b9c1065c26f req-3e7ce013-2be9-4cb7-8630-1939931dd2f5 service nova] Lock "49831327-6e13-412e-ab83-bf350e6e9761-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.717249] env[68285]: DEBUG oslo_concurrency.lockutils [req-0b622297-decf-4296-9497-9b9c1065c26f req-3e7ce013-2be9-4cb7-8630-1939931dd2f5 service nova] Lock "49831327-6e13-412e-ab83-bf350e6e9761-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.717416] env[68285]: DEBUG nova.compute.manager [req-0b622297-decf-4296-9497-9b9c1065c26f req-3e7ce013-2be9-4cb7-8630-1939931dd2f5 service nova] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] No waiting events found dispatching network-vif-plugged-d139a9ee-edc0-441e-91e6-f3252990f954 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1222.717575] env[68285]: WARNING nova.compute.manager [req-0b622297-decf-4296-9497-9b9c1065c26f req-3e7ce013-2be9-4cb7-8630-1939931dd2f5 service nova] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Received unexpected event network-vif-plugged-d139a9ee-edc0-441e-91e6-f3252990f954 for instance with vm_state building and task_state spawning. [ 1222.800681] env[68285]: DEBUG nova.network.neutron [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Successfully updated port: d139a9ee-edc0-441e-91e6-f3252990f954 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1222.834484] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892298, 'name': PowerOffVM_Task, 'duration_secs': 0.657551} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.834920] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1222.836994] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87721093-b455-44b0-8df6-ff89fc6de160 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.867136] env[68285]: DEBUG nova.compute.utils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1222.872014] env[68285]: DEBUG nova.compute.manager [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1222.872260] env[68285]: DEBUG nova.network.neutron [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1222.875689] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8f047b-ecc9-4ce0-be98-184931ae2ccb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.901486] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892299, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0484} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.901857] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1222.902117] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ba8823bf-179d-43d4-8712-d66dd79f84da/ba8823bf-179d-43d4-8712-d66dd79f84da.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1222.902474] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ba8823bf-179d-43d4-8712-d66dd79f84da/ba8823bf-179d-43d4-8712-d66dd79f84da.vmdk to [datastore2] be47df2a-aee7-4275-9acb-9cf74367f503/be47df2a-aee7-4275-9acb-9cf74367f503.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1222.902864] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a49f1934-09d3-40e7-b94f-ed49b7db456f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.912145] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1222.912145] env[68285]: value = "task-2892300" [ 1222.912145] env[68285]: _type = "Task" [ 1222.912145] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.922997] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1222.924379] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-397322b8-e9d9-4206-932a-0b4bfea320fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.932020] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892300, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.932020] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1222.932020] env[68285]: value = "task-2892301" [ 1222.932020] env[68285]: _type = "Task" [ 1222.932020] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.940230] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892301, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.948514] env[68285]: DEBUG nova.policy [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '184360cab7224b9eaef80dfe89d0208b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '288595d9298e43fa859bc6b68054aa08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1223.303678] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Acquiring lock "refresh_cache-49831327-6e13-412e-ab83-bf350e6e9761" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.303988] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Acquired lock "refresh_cache-49831327-6e13-412e-ab83-bf350e6e9761" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1223.304060] env[68285]: DEBUG nova.network.neutron [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1223.338116] env[68285]: DEBUG nova.network.neutron [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Successfully created port: b3858df6-8c44-4434-93e6-cc789ec6f4c3 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1223.372911] env[68285]: DEBUG nova.compute.manager [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1223.429068] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892300, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.442445] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1223.442696] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1223.442941] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.443101] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1223.443284] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1223.443544] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e79f191-9d4b-4151-ae92-9be991d8b94e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.466304] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1223.466581] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1223.467730] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5619c80b-d8c8-41c7-b926-8d5b5ab41f16 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.474014] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1223.474014] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c100ec-652c-22e8-6b0e-1432e6ec9e8b" [ 1223.474014] env[68285]: _type = "Task" [ 1223.474014] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.486086] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c100ec-652c-22e8-6b0e-1432e6ec9e8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.695449] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3be29d-8f6a-465f-8308-9acc0aa1d927 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.704182] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5895c2c-b89e-4f18-af7b-f81949004f3f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.738274] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a897f58-77f9-4ca9-a0dc-c966560267c4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.746985] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ffa269f-a107-4d1b-8914-8fe5ad6d4af3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.765327] env[68285]: DEBUG nova.compute.provider_tree [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1223.839844] env[68285]: DEBUG nova.network.neutron [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1223.932135] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892300, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.988148] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c100ec-652c-22e8-6b0e-1432e6ec9e8b, 'name': SearchDatastore_Task, 'duration_secs': 0.086672} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.989340] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29628699-a8ed-476b-abae-a789d13935b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.995901] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1223.995901] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521e9d1f-ecad-d3e1-cfa0-06361c52a7d4" [ 1223.995901] env[68285]: _type = "Task" [ 1223.995901] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.006118] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521e9d1f-ecad-d3e1-cfa0-06361c52a7d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.007344] env[68285]: DEBUG nova.network.neutron [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Updating instance_info_cache with network_info: [{"id": "d139a9ee-edc0-441e-91e6-f3252990f954", "address": "fa:16:3e:64:d9:d0", "network": {"id": "5bd744c2-3232-4600-b398-eca0a844cf62", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-919029468-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7131f91e9134b6f89ee5a4ef709ea3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ac3fd84-c373-49f5-82dc-784a6cdb686d", "external-id": "nsx-vlan-transportzone-298", "segmentation_id": 298, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd139a9ee-ed", "ovs_interfaceid": "d139a9ee-edc0-441e-91e6-f3252990f954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.273191] env[68285]: DEBUG nova.scheduler.client.report [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 
tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1224.382921] env[68285]: DEBUG nova.compute.manager [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1224.409219] env[68285]: DEBUG nova.virt.hardware [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1224.409524] env[68285]: DEBUG nova.virt.hardware [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1224.409687] env[68285]: DEBUG nova.virt.hardware [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1224.409869] env[68285]: DEBUG nova.virt.hardware [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1224.410020] env[68285]: DEBUG nova.virt.hardware [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1224.410173] env[68285]: DEBUG nova.virt.hardware [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1224.410381] env[68285]: DEBUG nova.virt.hardware [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1224.410543] env[68285]: DEBUG nova.virt.hardware [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1224.410709] env[68285]: DEBUG nova.virt.hardware [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1224.410870] env[68285]: DEBUG nova.virt.hardware [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1224.411104] env[68285]: DEBUG nova.virt.hardware [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1224.412108] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3117bff1-c47d-4d55-8135-0ce2b15d477b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.425991] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f46c42-e084-482d-8ea1-72850552e795 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.433611] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892300, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.508809] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521e9d1f-ecad-d3e1-cfa0-06361c52a7d4, 'name': SearchDatastore_Task, 'duration_secs': 0.083987} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.509252] env[68285]: DEBUG oslo_concurrency.lockutils [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1224.509432] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] feda1a98-3086-43a6-a887-f4d1602ca8ee/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk. {{(pid=68285) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1224.510175] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Releasing lock "refresh_cache-49831327-6e13-412e-ab83-bf350e6e9761" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1224.510437] env[68285]: DEBUG nova.compute.manager [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Instance network_info: |[{"id": "d139a9ee-edc0-441e-91e6-f3252990f954", "address": "fa:16:3e:64:d9:d0", "network": {"id": "5bd744c2-3232-4600-b398-eca0a844cf62", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-919029468-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7131f91e9134b6f89ee5a4ef709ea3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ac3fd84-c373-49f5-82dc-784a6cdb686d", "external-id": "nsx-vlan-transportzone-298", "segmentation_id": 298, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd139a9ee-ed", "ovs_interfaceid": "d139a9ee-edc0-441e-91e6-f3252990f954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1224.510676] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-816d793a-efc6-4137-9323-0e7ebff39525 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.513891] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:d9:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3ac3fd84-c373-49f5-82dc-784a6cdb686d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd139a9ee-edc0-441e-91e6-f3252990f954', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1224.523486] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Creating folder: Project (b7131f91e9134b6f89ee5a4ef709ea3b). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1224.523864] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b20689b-3516-407b-9b86-752ca8c81c82 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.533726] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1224.533726] env[68285]: value = "task-2892303" [ 1224.533726] env[68285]: _type = "Task" [ 1224.533726] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.540621] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Created folder: Project (b7131f91e9134b6f89ee5a4ef709ea3b) in parent group-v580775. [ 1224.540799] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Creating folder: Instances. Parent ref: group-v581052. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1224.541850] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d05550d-068a-4d69-8a91-f5c9d6f5dba1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.550757] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892303, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.564978] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Created folder: Instances in parent group-v581052. [ 1224.565074] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1224.565259] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1224.565481] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb9b23d1-a029-4f6e-ad8c-a6e9ea19c43d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.589687] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1224.589687] env[68285]: value = "task-2892305" [ 1224.589687] env[68285]: _type = "Task" [ 1224.589687] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.599540] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892305, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.755233] env[68285]: DEBUG nova.compute.manager [req-f1baa233-4ef9-4605-b312-3f0e1c563228 req-812bbbd5-403a-45ad-842b-fa737d881469 service nova] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Received event network-changed-d139a9ee-edc0-441e-91e6-f3252990f954 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1224.755467] env[68285]: DEBUG nova.compute.manager [req-f1baa233-4ef9-4605-b312-3f0e1c563228 req-812bbbd5-403a-45ad-842b-fa737d881469 service nova] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Refreshing instance network info cache due to event network-changed-d139a9ee-edc0-441e-91e6-f3252990f954. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1224.755686] env[68285]: DEBUG oslo_concurrency.lockutils [req-f1baa233-4ef9-4605-b312-3f0e1c563228 req-812bbbd5-403a-45ad-842b-fa737d881469 service nova] Acquiring lock "refresh_cache-49831327-6e13-412e-ab83-bf350e6e9761" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.755829] env[68285]: DEBUG oslo_concurrency.lockutils [req-f1baa233-4ef9-4605-b312-3f0e1c563228 req-812bbbd5-403a-45ad-842b-fa737d881469 service nova] Acquired lock "refresh_cache-49831327-6e13-412e-ab83-bf350e6e9761" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1224.755990] env[68285]: DEBUG nova.network.neutron [req-f1baa233-4ef9-4605-b312-3f0e1c563228 req-812bbbd5-403a-45ad-842b-fa737d881469 service nova] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Refreshing network info cache for port d139a9ee-edc0-441e-91e6-f3252990f954 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1224.779851] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.779851] env[68285]: DEBUG nova.compute.manager [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1224.782695] env[68285]: DEBUG oslo_concurrency.lockutils [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 7.849s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.932203] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892300, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.045131] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892303, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.101838] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892305, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.286208] env[68285]: DEBUG nova.compute.utils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1225.295893] env[68285]: DEBUG nova.compute.manager [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1225.295893] env[68285]: DEBUG nova.network.neutron [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1225.381389] env[68285]: DEBUG nova.network.neutron [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Successfully updated port: b3858df6-8c44-4434-93e6-cc789ec6f4c3 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1225.415385] env[68285]: DEBUG nova.policy [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '494447cb560a41dd9a3118745ac60554', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75a6837bced940cdaf5743b8e94cce29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1225.431564] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892300, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.347402} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.434874] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ba8823bf-179d-43d4-8712-d66dd79f84da/ba8823bf-179d-43d4-8712-d66dd79f84da.vmdk to [datastore2] be47df2a-aee7-4275-9acb-9cf74367f503/be47df2a-aee7-4275-9acb-9cf74367f503.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1225.436750] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08df8c59-4285-44db-af82-8ca365267c74 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.466396] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] be47df2a-aee7-4275-9acb-9cf74367f503/be47df2a-aee7-4275-9acb-9cf74367f503.vmdk or device None with type streamOptimized {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1225.469912] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98a94495-0df8-4b27-a63b-701c9e50936c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.498677] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1225.498677] env[68285]: value = "task-2892306" [ 1225.498677] env[68285]: _type = "Task" [ 1225.498677] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.514103] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892306, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.545015] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892303, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.605584] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892305, 'name': CreateVM_Task, 'duration_secs': 0.698861} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.609009] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1225.610381] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.610381] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1225.614025] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1225.614025] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a781cbc-cca8-4ebd-9b34-605b52618fbe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.618080] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Waiting for the task: (returnval){ [ 1225.618080] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5226137a-d608-ade8-e3b5-1153f4a38c0a" [ 1225.618080] env[68285]: _type = "Task" [ 1225.618080] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.634629] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5226137a-d608-ade8-e3b5-1153f4a38c0a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.680242] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0988e849-3dae-4a95-a57f-70c42c937f45 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.689099] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670b960e-efc9-4f75-aaf1-fc345a72fff9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.726010] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11eedcc9-4c52-423b-9ab2-c90b9356000e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.736504] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83eace64-63ac-47d1-9ef7-de32adb065ae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.753907] env[68285]: DEBUG nova.compute.provider_tree [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1225.794623] env[68285]: DEBUG nova.compute.manager [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1225.826141] env[68285]: DEBUG nova.network.neutron [req-f1baa233-4ef9-4605-b312-3f0e1c563228 req-812bbbd5-403a-45ad-842b-fa737d881469 service nova] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Updated VIF entry in instance network info cache for port d139a9ee-edc0-441e-91e6-f3252990f954. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1225.826213] env[68285]: DEBUG nova.network.neutron [req-f1baa233-4ef9-4605-b312-3f0e1c563228 req-812bbbd5-403a-45ad-842b-fa737d881469 service nova] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Updating instance_info_cache with network_info: [{"id": "d139a9ee-edc0-441e-91e6-f3252990f954", "address": "fa:16:3e:64:d9:d0", "network": {"id": "5bd744c2-3232-4600-b398-eca0a844cf62", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-919029468-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7131f91e9134b6f89ee5a4ef709ea3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ac3fd84-c373-49f5-82dc-784a6cdb686d", "external-id": "nsx-vlan-transportzone-298", "segmentation_id": 298, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd139a9ee-ed", "ovs_interfaceid": "d139a9ee-edc0-441e-91e6-f3252990f954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.867385] env[68285]: DEBUG oslo_vmware.rw_handles [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5203860a-ffd6-92d1-f70e-05d20ce486f4/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1225.868274] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d12d33c-f138-4c3c-ab16-15766e3978a2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.875380] env[68285]: DEBUG oslo_vmware.rw_handles [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5203860a-ffd6-92d1-f70e-05d20ce486f4/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1225.875654] env[68285]: ERROR oslo_vmware.rw_handles [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5203860a-ffd6-92d1-f70e-05d20ce486f4/disk-0.vmdk due to incomplete transfer. 
[ 1225.876122] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-577ae5a5-6952-43fb-aeef-987e9a341fb5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.884013] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "refresh_cache-8a649b1e-d007-4032-a46c-b479365e5289" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.884132] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "refresh_cache-8a649b1e-d007-4032-a46c-b479365e5289" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1225.884320] env[68285]: DEBUG nova.network.neutron [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1225.886413] env[68285]: DEBUG oslo_vmware.rw_handles [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5203860a-ffd6-92d1-f70e-05d20ce486f4/disk-0.vmdk. {{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1225.886599] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Uploaded image dc4540fc-09c1-4961-a8fb-336d7b5f5266 to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1225.889354] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1225.889845] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-82f55004-1fd2-4069-9b2c-828360a24cd1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.898755] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1225.898755] env[68285]: value = "task-2892307" [ 1225.898755] env[68285]: _type = "Task" [ 1225.898755] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.907967] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892307, 'name': Destroy_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.012244] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892306, 'name': ReconfigVM_Task, 'duration_secs': 0.484245} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.012553] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Reconfigured VM instance instance-00000046 to attach disk [datastore2] be47df2a-aee7-4275-9acb-9cf74367f503/be47df2a-aee7-4275-9acb-9cf74367f503.vmdk or device None with type streamOptimized {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1226.013255] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b652d1e-0722-42bf-870f-49a6337d99db {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.023990] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1226.023990] env[68285]: value = "task-2892308" [ 1226.023990] env[68285]: _type = "Task" [ 1226.023990] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.025165] env[68285]: DEBUG nova.network.neutron [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Successfully created port: 1f6dacae-76ce-408a-8e61-deddf144ba68 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1226.047991] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892308, 'name': Rename_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.054340] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892303, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.1908} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.054730] env[68285]: INFO nova.virt.vmwareapi.ds_util [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] feda1a98-3086-43a6-a887-f4d1602ca8ee/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk. [ 1226.056284] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f128bc6-c2b5-4750-b144-4e901f7ee4c2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.086213] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] feda1a98-3086-43a6-a887-f4d1602ca8ee/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1226.086540] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29188941-e713-46e9-9d5b-166b253f4460 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.110442] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1226.110442] env[68285]: value = "task-2892309" [ 1226.110442] env[68285]: _type = "Task" [ 1226.110442] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.119949] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892309, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.129364] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5226137a-d608-ade8-e3b5-1153f4a38c0a, 'name': SearchDatastore_Task, 'duration_secs': 0.058128} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.129629] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1226.130300] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1226.130300] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.130300] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1226.130435] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1226.130801] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81fa42f2-41ff-4b40-b8fe-daf6df8ae5ad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.141095] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1226.141322] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1226.142077] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2147a2e1-21f7-4dc7-9427-2b95186b33d4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.148651] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Waiting for the task: (returnval){ [ 1226.148651] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b266de-678e-07c2-fbdc-c2095e1a5dd9" [ 1226.148651] env[68285]: _type = "Task" [ 1226.148651] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.157628] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b266de-678e-07c2-fbdc-c2095e1a5dd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.260307] env[68285]: DEBUG nova.scheduler.client.report [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1226.329099] env[68285]: DEBUG oslo_concurrency.lockutils [req-f1baa233-4ef9-4605-b312-3f0e1c563228 req-812bbbd5-403a-45ad-842b-fa737d881469 service nova] Releasing lock "refresh_cache-49831327-6e13-412e-ab83-bf350e6e9761" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1226.410409] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892307, 'name': Destroy_Task, 'duration_secs': 0.506364} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.410784] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Destroyed the VM [ 1226.411227] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1226.411574] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-90f72b14-45bf-4cd4-af35-d2fcfd721146 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.423139] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1226.423139] env[68285]: value = "task-2892310" [ 1226.423139] env[68285]: _type = "Task" [ 1226.423139] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.424166] env[68285]: DEBUG nova.network.neutron [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1226.439618] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892310, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.543320] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892308, 'name': Rename_Task, 'duration_secs': 0.155833} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.543981] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1226.544489] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45f5b319-68e1-4127-83ff-3805dd7c09d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.553776] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1226.553776] env[68285]: value = "task-2892311" [ 1226.553776] env[68285]: _type = "Task" [ 1226.553776] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.566210] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892311, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.592350] env[68285]: DEBUG nova.network.neutron [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Updating instance_info_cache with network_info: [{"id": "b3858df6-8c44-4434-93e6-cc789ec6f4c3", "address": "fa:16:3e:2d:d3:41", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3858df6-8c", "ovs_interfaceid": "b3858df6-8c44-4434-93e6-cc789ec6f4c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.623159] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892309, 'name': ReconfigVM_Task, 'duration_secs': 0.429858} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.623326] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Reconfigured VM instance instance-00000063 to attach disk [datastore2] feda1a98-3086-43a6-a887-f4d1602ca8ee/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1226.624626] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498f0c75-a8db-49e1-a688-ab384b4d6fb7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.659181] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea06bb02-4785-4ad2-affc-b6270886979c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.677797] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b266de-678e-07c2-fbdc-c2095e1a5dd9, 'name': SearchDatastore_Task, 'duration_secs': 0.012086} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.679117] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1226.679117] env[68285]: value = "task-2892312" [ 1226.679117] env[68285]: _type = "Task" [ 1226.679117] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.679381] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea76f55d-8b48-41ca-ae42-54f52f6fedda {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.691331] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Waiting for the task: (returnval){ [ 1226.691331] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5232e712-7b5f-080f-a5ce-4d5c1a809754" [ 1226.691331] env[68285]: _type = "Task" [ 1226.691331] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.695237] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892312, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.705540] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5232e712-7b5f-080f-a5ce-4d5c1a809754, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.803946] env[68285]: DEBUG nova.compute.manager [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1226.838482] env[68285]: DEBUG nova.virt.hardware [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1226.838780] env[68285]: DEBUG nova.virt.hardware [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1226.838959] env[68285]: DEBUG nova.virt.hardware [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1226.839250] env[68285]: DEBUG nova.virt.hardware [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1226.839436] env[68285]: DEBUG nova.virt.hardware [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1226.839616] env[68285]: DEBUG nova.virt.hardware [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1226.839844] env[68285]: DEBUG nova.virt.hardware [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1226.840015] env[68285]: DEBUG nova.virt.hardware [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1226.840192] env[68285]: DEBUG nova.virt.hardware [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1226.840357] env[68285]: DEBUG nova.virt.hardware [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1226.840528] env[68285]: DEBUG nova.virt.hardware [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1226.841446] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3bc32a-a5df-4b7d-bab6-e5dbebdd3802 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.850738] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9764b988-d160-43c7-b3ac-7428c6d68f71 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.938927] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892310, 'name': RemoveSnapshot_Task} progress is 48%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.033649] env[68285]: DEBUG nova.compute.manager [req-dbf1cfba-cc19-4eab-bdf3-765a4aa9a03d req-e6874a3f-99db-4f6c-87cb-88bc2e0f9448 service nova] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Received event network-vif-plugged-b3858df6-8c44-4434-93e6-cc789ec6f4c3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1227.033789] env[68285]: DEBUG oslo_concurrency.lockutils [req-dbf1cfba-cc19-4eab-bdf3-765a4aa9a03d req-e6874a3f-99db-4f6c-87cb-88bc2e0f9448 service nova] Acquiring lock "8a649b1e-d007-4032-a46c-b479365e5289-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.034017] env[68285]: DEBUG oslo_concurrency.lockutils [req-dbf1cfba-cc19-4eab-bdf3-765a4aa9a03d req-e6874a3f-99db-4f6c-87cb-88bc2e0f9448 service nova] Lock "8a649b1e-d007-4032-a46c-b479365e5289-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.034293] env[68285]: DEBUG oslo_concurrency.lockutils [req-dbf1cfba-cc19-4eab-bdf3-765a4aa9a03d req-e6874a3f-99db-4f6c-87cb-88bc2e0f9448 service nova] Lock "8a649b1e-d007-4032-a46c-b479365e5289-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.034351] env[68285]: DEBUG nova.compute.manager [req-dbf1cfba-cc19-4eab-bdf3-765a4aa9a03d req-e6874a3f-99db-4f6c-87cb-88bc2e0f9448 service nova] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] No waiting events found dispatching network-vif-plugged-b3858df6-8c44-4434-93e6-cc789ec6f4c3 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1227.034510] env[68285]: WARNING nova.compute.manager [req-dbf1cfba-cc19-4eab-bdf3-765a4aa9a03d req-e6874a3f-99db-4f6c-87cb-88bc2e0f9448 service nova] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Received unexpected event network-vif-plugged-b3858df6-8c44-4434-93e6-cc789ec6f4c3 for instance with vm_state building and task_state spawning. [ 1227.034872] env[68285]: DEBUG nova.compute.manager [req-dbf1cfba-cc19-4eab-bdf3-765a4aa9a03d req-e6874a3f-99db-4f6c-87cb-88bc2e0f9448 service nova] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Received event network-changed-b3858df6-8c44-4434-93e6-cc789ec6f4c3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1227.034872] env[68285]: DEBUG nova.compute.manager [req-dbf1cfba-cc19-4eab-bdf3-765a4aa9a03d req-e6874a3f-99db-4f6c-87cb-88bc2e0f9448 service nova] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Refreshing instance network info cache due to event network-changed-b3858df6-8c44-4434-93e6-cc789ec6f4c3. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1227.034970] env[68285]: DEBUG oslo_concurrency.lockutils [req-dbf1cfba-cc19-4eab-bdf3-765a4aa9a03d req-e6874a3f-99db-4f6c-87cb-88bc2e0f9448 service nova] Acquiring lock "refresh_cache-8a649b1e-d007-4032-a46c-b479365e5289" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.067987] env[68285]: DEBUG oslo_vmware.api [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892311, 'name': PowerOnVM_Task, 'duration_secs': 0.486522} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.068269] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1227.094682] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "refresh_cache-8a649b1e-d007-4032-a46c-b479365e5289" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1227.094993] env[68285]: DEBUG nova.compute.manager [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Instance network_info: |[{"id": "b3858df6-8c44-4434-93e6-cc789ec6f4c3", "address": "fa:16:3e:2d:d3:41", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3858df6-8c", "ovs_interfaceid": "b3858df6-8c44-4434-93e6-cc789ec6f4c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1227.095305] env[68285]: DEBUG oslo_concurrency.lockutils [req-dbf1cfba-cc19-4eab-bdf3-765a4aa9a03d req-e6874a3f-99db-4f6c-87cb-88bc2e0f9448 service nova] Acquired lock "refresh_cache-8a649b1e-d007-4032-a46c-b479365e5289" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.095485] env[68285]: DEBUG nova.network.neutron [req-dbf1cfba-cc19-4eab-bdf3-765a4aa9a03d req-e6874a3f-99db-4f6c-87cb-88bc2e0f9448 service nova] [instance: 
8a649b1e-d007-4032-a46c-b479365e5289] Refreshing network info cache for port b3858df6-8c44-4434-93e6-cc789ec6f4c3 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1227.096737] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:d3:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3858df6-8c44-4434-93e6-cc789ec6f4c3', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1227.104057] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1227.105445] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1227.105445] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a68202e-16d1-49e5-9349-075058ea9b8d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.127615] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1227.127615] env[68285]: value = "task-2892313" [ 1227.127615] env[68285]: _type = "Task" [ 1227.127615] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.136943] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892313, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.203182] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892312, 'name': ReconfigVM_Task, 'duration_secs': 0.189277} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.204117] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1227.204462] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e742ba4a-a742-4fce-859e-a5d49b3bd990 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.212485] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5232e712-7b5f-080f-a5ce-4d5c1a809754, 'name': SearchDatastore_Task, 'duration_secs': 0.016312} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.213219] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1227.213496] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 49831327-6e13-412e-ab83-bf350e6e9761/49831327-6e13-412e-ab83-bf350e6e9761.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1227.213816] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b82cb267-7ef0-4efd-b6fc-8b8abb4f0c2e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.218471] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1227.218471] env[68285]: value = "task-2892314" [ 1227.218471] env[68285]: _type = "Task" [ 1227.218471] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.222512] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Waiting for the task: (returnval){ [ 1227.222512] env[68285]: value = "task-2892315" [ 1227.222512] env[68285]: _type = "Task" [ 1227.222512] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.229443] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892314, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.230338] env[68285]: DEBUG nova.compute.manager [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1227.231414] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69b4cbf-7e56-4d7e-8d59-2910323c3536 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.236536] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892315, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.274172] env[68285]: DEBUG oslo_concurrency.lockutils [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.491s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.277373] env[68285]: DEBUG oslo_concurrency.lockutils [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.847s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.277656] env[68285]: DEBUG nova.objects.instance [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lazy-loading 'resources' on Instance uuid ef87ff30-ef45-4abb-8696-d5493572703a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1227.452364] env[68285]: DEBUG oslo_vmware.api [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892310, 'name': RemoveSnapshot_Task, 'duration_secs': 0.592641} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.452364] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1227.452364] env[68285]: INFO nova.compute.manager [None req-b5031e3b-cf91-4ecc-af85-1dbf50aeebf8 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Took 16.36 seconds to snapshot the instance on the hypervisor. [ 1227.620040] env[68285]: DEBUG nova.compute.manager [req-42d435ca-78af-4160-9722-740954a01d05 req-3de441ac-17f6-4195-a978-42832a4aadb1 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Received event network-vif-plugged-1f6dacae-76ce-408a-8e61-deddf144ba68 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1227.620291] env[68285]: DEBUG oslo_concurrency.lockutils [req-42d435ca-78af-4160-9722-740954a01d05 req-3de441ac-17f6-4195-a978-42832a4aadb1 service nova] Acquiring lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.620505] env[68285]: DEBUG oslo_concurrency.lockutils [req-42d435ca-78af-4160-9722-740954a01d05 req-3de441ac-17f6-4195-a978-42832a4aadb1 service nova] Lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.620674] env[68285]: DEBUG oslo_concurrency.lockutils [req-42d435ca-78af-4160-9722-740954a01d05 req-3de441ac-17f6-4195-a978-42832a4aadb1 service nova] Lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.620836] env[68285]: DEBUG nova.compute.manager [req-42d435ca-78af-4160-9722-740954a01d05 req-3de441ac-17f6-4195-a978-42832a4aadb1 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] No waiting events found dispatching network-vif-plugged-1f6dacae-76ce-408a-8e61-deddf144ba68 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1227.621034] env[68285]: WARNING nova.compute.manager [req-42d435ca-78af-4160-9722-740954a01d05 req-3de441ac-17f6-4195-a978-42832a4aadb1 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Received unexpected event network-vif-plugged-1f6dacae-76ce-408a-8e61-deddf144ba68 for instance with vm_state building and task_state spawning. [ 1227.640996] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892313, 'name': CreateVM_Task, 'duration_secs': 0.46597} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.641216] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1227.642065] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.642246] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.642632] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1227.643622] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f87fa4db-a702-4eb2-a111-f9b73b0f43e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.649740] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1227.649740] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52fba133-c80d-bcf0-d769-fa2f7c70c424" [ 1227.649740] env[68285]: _type = "Task" [ 1227.649740] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.660492] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52fba133-c80d-bcf0-d769-fa2f7c70c424, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.670760] env[68285]: DEBUG nova.network.neutron [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Successfully updated port: 1f6dacae-76ce-408a-8e61-deddf144ba68 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1227.730314] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892314, 'name': PowerOnVM_Task} progress is 96%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.740124] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892315, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.753260] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d513fb52-4c6f-4c7c-bc12-ae38197e3693 tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "be47df2a-aee7-4275-9acb-9cf74367f503" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 29.186s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.854971] env[68285]: INFO nova.scheduler.client.report [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleted allocation for migration 022b54e2-d1a4-4903-8d15-41f4cac86f16 [ 1227.880030] env[68285]: DEBUG nova.network.neutron [req-dbf1cfba-cc19-4eab-bdf3-765a4aa9a03d req-e6874a3f-99db-4f6c-87cb-88bc2e0f9448 service nova] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Updated VIF entry in instance network info cache for port b3858df6-8c44-4434-93e6-cc789ec6f4c3. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1227.880242] env[68285]: DEBUG nova.network.neutron [req-dbf1cfba-cc19-4eab-bdf3-765a4aa9a03d req-e6874a3f-99db-4f6c-87cb-88bc2e0f9448 service nova] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Updating instance_info_cache with network_info: [{"id": "b3858df6-8c44-4434-93e6-cc789ec6f4c3", "address": "fa:16:3e:2d:d3:41", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3858df6-8c", "ovs_interfaceid": "b3858df6-8c44-4434-93e6-cc789ec6f4c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.069456] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570ccfba-b814-4b78-b79b-ed16b6300d47 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.080274] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4ad814-5bd8-46a8-985c-da6aa451ceed 
{{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.111457] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821816bd-0bc7-4314-ad85-832946cbd60e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.119777] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c257ea-a0a4-4f67-97c3-000cda141060 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.134470] env[68285]: DEBUG nova.compute.provider_tree [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.159928] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52fba133-c80d-bcf0-d769-fa2f7c70c424, 'name': SearchDatastore_Task, 'duration_secs': 0.031195} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.160233] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1228.164030] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1228.164030] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.164030] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.164030] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1228.164030] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-e102878c-a953-4996-acf1-149010dd501e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.177032] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.177181] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.177324] env[68285]: DEBUG nova.network.neutron [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1228.179498] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1228.179664] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1228.180587] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c252f5a9-0524-46f1-bc70-5dc535108e06 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.186410] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1228.186410] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525de633-0f35-5d36-7afe-36bdcddd15f7" [ 1228.186410] env[68285]: _type = "Task" [ 1228.186410] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.196030] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525de633-0f35-5d36-7afe-36bdcddd15f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.234764] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892315, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.720034} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.235008] env[68285]: DEBUG oslo_vmware.api [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892314, 'name': PowerOnVM_Task, 'duration_secs': 0.567171} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.235347] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 49831327-6e13-412e-ab83-bf350e6e9761/49831327-6e13-412e-ab83-bf350e6e9761.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1228.235659] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1228.235963] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1228.237632] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f950170-feaf-4668-a4d6-c267a5a4125b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.240103] env[68285]: DEBUG nova.compute.manager [None req-32e654cf-a360-4692-a82a-6178585357f5 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1228.240866] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207beb07-df30-4a6f-b7d2-4e3d3f8e083a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.252277] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Waiting for the task: (returnval){ [ 1228.252277] env[68285]: value = "task-2892316" [ 1228.252277] env[68285]: _type = "Task" [ 1228.252277] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.260893] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892316, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.363007] env[68285]: DEBUG oslo_concurrency.lockutils [None req-69e62933-a7b5-42a6-b0cf-d3ed12843f9f tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "fe8e0a71-e9b0-4035-a696-51455d6fc473" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 14.419s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.383178] env[68285]: DEBUG oslo_concurrency.lockutils [req-dbf1cfba-cc19-4eab-bdf3-765a4aa9a03d req-e6874a3f-99db-4f6c-87cb-88bc2e0f9448 service nova] Releasing lock "refresh_cache-8a649b1e-d007-4032-a46c-b479365e5289" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1228.641018] env[68285]: DEBUG nova.scheduler.client.report [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1228.702642] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525de633-0f35-5d36-7afe-36bdcddd15f7, 'name': SearchDatastore_Task, 'duration_secs': 0.065617} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.704145] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-055b13b6-5d60-4d7e-9fa1-17ec7eb5f41b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.712313] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1228.712313] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52bcae00-337e-4c82-ac30-e0a87dd5d34e" [ 1228.712313] env[68285]: _type = "Task" [ 1228.712313] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.717022] env[68285]: DEBUG nova.network.neutron [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1228.729480] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bcae00-337e-4c82-ac30-e0a87dd5d34e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.769231] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892316, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076337} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.771926] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1228.773502] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65dc7ddc-3003-4676-83fe-17f7627a7d39 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.799866] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 49831327-6e13-412e-ab83-bf350e6e9761/49831327-6e13-412e-ab83-bf350e6e9761.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1228.800777] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4adad892-b2ac-41b6-ba0f-edd00ec45411 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.826729] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Waiting for the task: (returnval){ [ 1228.826729] env[68285]: value = "task-2892317" [ 1228.826729] env[68285]: _type = "Task" [ 1228.826729] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.837496] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892317, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.885975] env[68285]: DEBUG nova.network.neutron [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Updating instance_info_cache with network_info: [{"id": "1f6dacae-76ce-408a-8e61-deddf144ba68", "address": "fa:16:3e:f2:71:1b", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f6dacae-76", "ovs_interfaceid": "1f6dacae-76ce-408a-8e61-deddf144ba68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.929843] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "fe8e0a71-e9b0-4035-a696-51455d6fc473" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.930104] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "fe8e0a71-e9b0-4035-a696-51455d6fc473" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.930310] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "fe8e0a71-e9b0-4035-a696-51455d6fc473-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.930485] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "fe8e0a71-e9b0-4035-a696-51455d6fc473-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.930681] 
env[68285]: DEBUG oslo_concurrency.lockutils [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "fe8e0a71-e9b0-4035-a696-51455d6fc473-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.933462] env[68285]: INFO nova.compute.manager [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Terminating instance [ 1229.143339] env[68285]: DEBUG oslo_concurrency.lockutils [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.866s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.146028] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.531s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.147262] env[68285]: INFO nova.compute.claims [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1229.161218] env[68285]: INFO nova.scheduler.client.report [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Deleted allocations for instance ef87ff30-ef45-4abb-8696-d5493572703a [ 1229.227327] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bcae00-337e-4c82-ac30-e0a87dd5d34e, 'name': SearchDatastore_Task, 'duration_secs': 0.050309} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.227599] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.227861] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8a649b1e-d007-4032-a46c-b479365e5289/8a649b1e-d007-4032-a46c-b479365e5289.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1229.228149] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67e609af-bbda-4066-8a8c-d69fd235630d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.236470] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1229.236470] env[68285]: value = "task-2892318" [ 1229.236470] env[68285]: _type = "Task" [ 1229.236470] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.245816] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892318, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.341423] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892317, 'name': ReconfigVM_Task, 'duration_secs': 0.314033} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.341724] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 49831327-6e13-412e-ab83-bf350e6e9761/49831327-6e13-412e-ab83-bf350e6e9761.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1229.342387] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-261a67e4-e57a-4e26-ad8b-51f44dc532e6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.350437] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Waiting for the task: (returnval){ [ 1229.350437] env[68285]: value = "task-2892319" [ 1229.350437] env[68285]: _type = "Task" [ 1229.350437] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.360568] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892319, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.389686] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.389832] env[68285]: DEBUG nova.compute.manager [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Instance network_info: |[{"id": "1f6dacae-76ce-408a-8e61-deddf144ba68", "address": "fa:16:3e:f2:71:1b", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f6dacae-76", "ovs_interfaceid": "1f6dacae-76ce-408a-8e61-deddf144ba68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1229.390268] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:71:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f6dacae-76ce-408a-8e61-deddf144ba68', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1229.398787] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Creating folder: Project (75a6837bced940cdaf5743b8e94cce29). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1229.399126] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f2d62c4-0819-4dcc-9eb1-26f6bf7e3dfe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.412070] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Created folder: Project (75a6837bced940cdaf5743b8e94cce29) in parent group-v580775. [ 1229.412410] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Creating folder: Instances. Parent ref: group-v581056. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1229.412706] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8021fc50-2fc6-47a8-b2fc-9e63be6c9dc7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.427356] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Created folder: Instances in parent group-v581056. [ 1229.427721] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1229.428067] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1229.428208] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5bd3dea9-e8fc-490b-819b-3d737398068e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.444459] env[68285]: DEBUG nova.compute.manager [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1229.444679] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1229.445532] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1edbe039-25aa-4c0c-8796-910c72726e81 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.454291] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1229.455822] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35d82afb-1680-4110-971c-fb98c132d5ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.457102] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1229.457102] env[68285]: value = "task-2892322" [ 1229.457102] env[68285]: _type = "Task" [ 1229.457102] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.464784] env[68285]: DEBUG oslo_vmware.api [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1229.464784] env[68285]: value = "task-2892323" [ 1229.464784] env[68285]: _type = "Task" [ 1229.464784] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.471208] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892322, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.477810] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b05951-b851-45ee-9b27-7225b4a67870 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.489740] env[68285]: DEBUG oslo_vmware.api [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892323, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.490686] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "5c0a91a4-b247-4950-8c7c-c62afdc4860f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.491435] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "5c0a91a4-b247-4950-8c7c-c62afdc4860f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.498018] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc160dfc-8f7b-455f-8f2c-243daf46e4fd tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Suspending the VM {{(pid=68285) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1229.498018] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9b081ed1-8daa-400b-b33f-d54047a9a2bf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.510872] env[68285]: DEBUG oslo_vmware.api [None req-dc160dfc-8f7b-455f-8f2c-243daf46e4fd tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1229.510872] env[68285]: value = "task-2892324" [ 1229.510872] env[68285]: _type = "Task" [ 1229.510872] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.522132] env[68285]: DEBUG oslo_vmware.api [None req-dc160dfc-8f7b-455f-8f2c-243daf46e4fd tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892324, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.656085] env[68285]: DEBUG nova.compute.manager [req-044fc6a2-47c9-417e-9a35-e9868e6044ad req-26d25368-3b0e-4f94-aeeb-66e441b3a9e6 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Received event network-changed-1f6dacae-76ce-408a-8e61-deddf144ba68 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1229.656085] env[68285]: DEBUG nova.compute.manager [req-044fc6a2-47c9-417e-9a35-e9868e6044ad req-26d25368-3b0e-4f94-aeeb-66e441b3a9e6 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Refreshing instance network info cache due to event network-changed-1f6dacae-76ce-408a-8e61-deddf144ba68. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1229.656416] env[68285]: DEBUG oslo_concurrency.lockutils [req-044fc6a2-47c9-417e-9a35-e9868e6044ad req-26d25368-3b0e-4f94-aeeb-66e441b3a9e6 service nova] Acquiring lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.656450] env[68285]: DEBUG oslo_concurrency.lockutils [req-044fc6a2-47c9-417e-9a35-e9868e6044ad req-26d25368-3b0e-4f94-aeeb-66e441b3a9e6 service nova] Acquired lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1229.656614] env[68285]: DEBUG nova.network.neutron [req-044fc6a2-47c9-417e-9a35-e9868e6044ad req-26d25368-3b0e-4f94-aeeb-66e441b3a9e6 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Refreshing network info cache for port 1f6dacae-76ce-408a-8e61-deddf144ba68 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1229.673246] env[68285]: DEBUG oslo_concurrency.lockutils [None req-67e2e610-6df6-4127-8656-94389105bd1f tempest-ImagesTestJSON-1472763889 tempest-ImagesTestJSON-1472763889-project-member] Lock "ef87ff30-ef45-4abb-8696-d5493572703a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.019s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.747913] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892318, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.865473] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892319, 'name': Rename_Task, 'duration_secs': 0.184186} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.865997] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1229.866485] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a227bb49-7643-4aea-ba3c-e6ba369791f7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.876132] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Waiting for the task: (returnval){ [ 1229.876132] env[68285]: value = "task-2892325" [ 1229.876132] env[68285]: _type = "Task" [ 1229.876132] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.886786] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892325, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.971691] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892322, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.981868] env[68285]: DEBUG oslo_vmware.api [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892323, 'name': PowerOffVM_Task, 'duration_secs': 0.39307} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.982483] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1229.982579] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1229.982846] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-415d6f59-3c64-483c-baef-f6acda9adb67 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.994877] env[68285]: DEBUG nova.compute.manager [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1230.022673] env[68285]: DEBUG oslo_vmware.api [None req-dc160dfc-8f7b-455f-8f2c-243daf46e4fd tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892324, 'name': SuspendVM_Task} progress is 79%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.075745] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1230.075960] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1230.076186] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleting the datastore file [datastore2] fe8e0a71-e9b0-4035-a696-51455d6fc473 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1230.076496] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4b2010ae-1736-4fa4-838b-616bda2734f7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.086339] env[68285]: DEBUG oslo_vmware.api [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1230.086339] env[68285]: value = "task-2892327" [ 1230.086339] env[68285]: _type = "Task" [ 1230.086339] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.096703] env[68285]: DEBUG oslo_vmware.api [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892327, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.249731] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892318, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.865428} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.252841] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8a649b1e-d007-4032-a46c-b479365e5289/8a649b1e-d007-4032-a46c-b479365e5289.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1230.252841] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1230.253610] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cec2174c-eae1-41bd-9325-2cf4f0df2256 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.262511] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1230.262511] env[68285]: value = "task-2892328" [ 1230.262511] env[68285]: _type = "Task" [ 1230.262511] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.275068] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892328, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.393165] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892325, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.441069] env[68285]: DEBUG nova.network.neutron [req-044fc6a2-47c9-417e-9a35-e9868e6044ad req-26d25368-3b0e-4f94-aeeb-66e441b3a9e6 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Updated VIF entry in instance network info cache for port 1f6dacae-76ce-408a-8e61-deddf144ba68. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1230.441523] env[68285]: DEBUG nova.network.neutron [req-044fc6a2-47c9-417e-9a35-e9868e6044ad req-26d25368-3b0e-4f94-aeeb-66e441b3a9e6 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Updating instance_info_cache with network_info: [{"id": "1f6dacae-76ce-408a-8e61-deddf144ba68", "address": "fa:16:3e:f2:71:1b", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f6dacae-76", "ovs_interfaceid": "1f6dacae-76ce-408a-8e61-deddf144ba68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.472213] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892322, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.478020] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa52c16-5946-481f-958c-1224913b8992 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.484891] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7c6b77-6d96-4a53-83ed-e6dd4494379e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.529812] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2eeab8-47e3-4b2c-b3ea-7eb46db59b2d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.539419] env[68285]: DEBUG oslo_vmware.api [None req-dc160dfc-8f7b-455f-8f2c-243daf46e4fd tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892324, 'name': SuspendVM_Task, 'duration_secs': 0.976888} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.545217] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-dc160dfc-8f7b-455f-8f2c-243daf46e4fd tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Suspended the VM {{(pid=68285) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1230.545217] env[68285]: DEBUG nova.compute.manager [None req-dc160dfc-8f7b-455f-8f2c-243daf46e4fd tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1230.545217] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869e135f-116b-43b2-a02f-5c4c68399a49 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.546875] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117e09dd-d391-4231-8821-1f4a3c88c887 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.552486] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1230.567715] env[68285]: DEBUG nova.compute.provider_tree [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1230.597413] env[68285]: DEBUG oslo_vmware.api [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892327, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.222956} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.597685] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1230.597865] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1230.598107] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1230.598286] env[68285]: INFO nova.compute.manager [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1230.598534] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1230.598737] env[68285]: DEBUG nova.compute.manager [-] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1230.598840] env[68285]: DEBUG nova.network.neutron [-] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1230.773161] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892328, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.130469} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.773452] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1230.774239] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf79599-3ee9-4d0a-a6ff-603c2eea662b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.797271] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 8a649b1e-d007-4032-a46c-b479365e5289/8a649b1e-d007-4032-a46c-b479365e5289.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1230.797563] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f967439c-6615-4f5d-9423-b3b250c4384b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.829023] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1230.829023] env[68285]: value = "task-2892329" [ 1230.829023] env[68285]: _type = "Task" [ 1230.829023] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.564555] env[68285]: DEBUG oslo_concurrency.lockutils [req-044fc6a2-47c9-417e-9a35-e9868e6044ad req-26d25368-3b0e-4f94-aeeb-66e441b3a9e6 service nova] Releasing lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1231.564555] env[68285]: DEBUG nova.scheduler.client.report [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1231.571058] env[68285]: DEBUG oslo_concurrency.lockutils [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "8fd23cb4-45da-4bd9-a258-845eb3f6a1dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.571353] env[68285]: DEBUG oslo_concurrency.lockutils [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "8fd23cb4-45da-4bd9-a258-845eb3f6a1dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.571604] env[68285]: DEBUG oslo_concurrency.lockutils [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "8fd23cb4-45da-4bd9-a258-845eb3f6a1dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.571848] env[68285]: DEBUG oslo_concurrency.lockutils [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "8fd23cb4-45da-4bd9-a258-845eb3f6a1dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.572079] env[68285]: DEBUG oslo_concurrency.lockutils [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "8fd23cb4-45da-4bd9-a258-845eb3f6a1dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.573905] env[68285]: 
INFO nova.compute.manager [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Rescuing [ 1231.574218] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "refresh_cache-801f524e-28b5-4452-b880-0fc30d3c5eef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.574442] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "refresh_cache-801f524e-28b5-4452-b880-0fc30d3c5eef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.574695] env[68285]: DEBUG nova.network.neutron [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1231.579660] env[68285]: INFO nova.compute.manager [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Terminating instance [ 1231.581826] env[68285]: DEBUG nova.network.neutron [-] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.598245] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892329, 'name': ReconfigVM_Task, 'duration_secs': 0.292132} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.604392] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 8a649b1e-d007-4032-a46c-b479365e5289/8a649b1e-d007-4032-a46c-b479365e5289.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1231.605174] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892322, 'name': CreateVM_Task, 'duration_secs': 1.106087} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.605391] env[68285]: DEBUG oslo_vmware.api [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892325, 'name': PowerOnVM_Task, 'duration_secs': 0.689937} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.605833] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36fa9b87-b926-474b-8e2e-7988728fc8ad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.607118] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1231.607369] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1231.608887] env[68285]: INFO nova.compute.manager [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Took 9.66 seconds to spawn the instance on the hypervisor. [ 1231.608887] env[68285]: DEBUG nova.compute.manager [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1231.608887] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.608887] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.609202] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1231.609913] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775fcefe-fe8c-4167-a101-c29d72b9e892 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.612604] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f40c2ed-51af-4251-846d-f11117307098 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.618394] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 
tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1231.618394] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525bacd4-b947-64c4-c496-418534f2aced" [ 1231.618394] env[68285]: _type = "Task" [ 1231.618394] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.621442] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1231.621442] env[68285]: value = "task-2892330" [ 1231.621442] env[68285]: _type = "Task" [ 1231.621442] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.638554] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892330, 'name': Rename_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.644571] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525bacd4-b947-64c4-c496-418534f2aced, 'name': SearchDatastore_Task, 'duration_secs': 0.011555} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.644901] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1231.645175] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1231.645411] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.645559] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.645778] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 
tempest-AttachInterfacesTestJSON-215364715-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1231.646319] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c7a789b-937c-4de7-941a-8f209971ea3f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.657021] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1231.657232] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1231.657998] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44955542-b1a9-4337-a292-8daa57f64558 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.663764] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1231.663764] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]522e44c1-de40-2778-85e4-c23ecdcd591d" [ 1231.663764] env[68285]: _type = "Task" [ 1231.663764] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.673234] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522e44c1-de40-2778-85e4-c23ecdcd591d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.765826] env[68285]: DEBUG nova.compute.manager [req-f1f79e76-6160-42bb-a9f1-24dd6b5570cc req-376b0ec2-f91e-471b-9ee6-52a65bbb6c55 service nova] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Received event network-vif-deleted-9199e860-a70a-4057-93f0-526a4c8a2ed7 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1232.086020] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.936s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1232.086020] env[68285]: DEBUG nova.compute.manager [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1232.086020] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 11.245s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.094172] env[68285]: INFO nova.compute.manager [-] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Took 1.50 seconds to deallocate network for instance. [ 1232.096052] env[68285]: INFO nova.compute.manager [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Resuming [ 1232.096052] env[68285]: DEBUG nova.objects.instance [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lazy-loading 'flavor' on Instance uuid be47df2a-aee7-4275-9acb-9cf74367f503 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1232.097752] env[68285]: DEBUG nova.compute.manager [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1232.097942] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1232.104063] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e7fc24-3ded-49b5-a389-e3b912f20ea5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.113891] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1232.115138] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5519d1b-0740-4e53-81bf-3940c1be9654 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.124171] env[68285]: DEBUG oslo_vmware.api [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1232.124171] env[68285]: value = "task-2892331" [ 1232.124171] env[68285]: _type = "Task" [ 1232.124171] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.151052] env[68285]: INFO nova.compute.manager [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Took 22.26 seconds to build instance. [ 1232.154641] env[68285]: DEBUG oslo_vmware.api [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892331, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.160976] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892330, 'name': Rename_Task, 'duration_secs': 0.149459} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.161465] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1232.161774] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f5dc8c3-d333-4413-9a4a-d462b0b05062 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.170747] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1232.170747] env[68285]: value = "task-2892332" [ 1232.170747] env[68285]: _type = "Task" [ 1232.170747] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.183158] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522e44c1-de40-2778-85e4-c23ecdcd591d, 'name': SearchDatastore_Task, 'duration_secs': 0.00904} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.184410] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d960b43-2e6c-4793-8be1-59fae4ea66ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.191046] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892332, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.197028] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1232.197028] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524d03c5-41c2-11f1-33eb-200a83f0203a" [ 1232.197028] env[68285]: _type = "Task" [ 1232.197028] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.211664] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524d03c5-41c2-11f1-33eb-200a83f0203a, 'name': SearchDatastore_Task, 'duration_secs': 0.012431} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.212042] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1232.213996] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8917672f-3b0d-42a1-b8b1-94ac47ce941a/8917672f-3b0d-42a1-b8b1-94ac47ce941a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1232.213996] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed616766-bd70-4b4d-b880-39d0aaeafbd2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.222940] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1232.222940] env[68285]: value = "task-2892333" [ 1232.222940] env[68285]: _type = "Task" [ 1232.222940] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.236275] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892333, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.427584] env[68285]: DEBUG nova.network.neutron [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Updating instance_info_cache with network_info: [{"id": "1d10105d-1754-49c2-9593-7de22107732e", "address": "fa:16:3e:38:96:08", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d10105d-17", "ovs_interfaceid": "1d10105d-1754-49c2-9593-7de22107732e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.589269] env[68285]: DEBUG nova.compute.utils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1232.590743] env[68285]: DEBUG nova.compute.manager [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1232.590921] env[68285]: DEBUG nova.network.neutron [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1232.595279] env[68285]: INFO nova.compute.claims [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1232.610964] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.638627] env[68285]: DEBUG oslo_vmware.api [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892331, 'name': PowerOffVM_Task, 'duration_secs': 0.201127} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.638988] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1232.639202] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1232.639864] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f90cb0e5-d676-4846-a914-5a73346a9d8b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.656052] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7ca53003-ec91-447f-a172-b3734a8d667f tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Lock "49831327-6e13-412e-ab83-bf350e6e9761" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.779s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1232.656052] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.656455] env[68285]: DEBUG 
oslo_concurrency.lockutils [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.656496] env[68285]: INFO nova.compute.manager [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Rebooting instance [ 1232.667446] env[68285]: DEBUG nova.policy [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb7f978e7fa64e88af5756fca97fce6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4471597d3345443aa28b97acd91847e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1232.684525] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Acquiring lock "49831327-6e13-412e-ab83-bf350e6e9761" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.684836] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Lock "49831327-6e13-412e-ab83-bf350e6e9761" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.685056] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Acquiring lock "49831327-6e13-412e-ab83-bf350e6e9761-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.685245] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Lock "49831327-6e13-412e-ab83-bf350e6e9761-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.685416] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Lock "49831327-6e13-412e-ab83-bf350e6e9761-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1232.687048] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892332, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.688048] env[68285]: INFO nova.compute.manager [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Terminating instance [ 1232.737826] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892333, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505545} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.739383] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 8917672f-3b0d-42a1-b8b1-94ac47ce941a/8917672f-3b0d-42a1-b8b1-94ac47ce941a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1232.739799] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1232.740190] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1232.740521] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1232.740838] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Deleting the datastore file [datastore1] 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1232.741191] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9fcc5f6b-5633-4764-a3bb-7112ed8c3958 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.743789] 
env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a2aba28-008e-4120-ba1c-c69c6acc8e67 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.753448] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1232.753448] env[68285]: value = "task-2892335" [ 1232.753448] env[68285]: _type = "Task" [ 1232.753448] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.755562] env[68285]: DEBUG oslo_vmware.api [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1232.755562] env[68285]: value = "task-2892336" [ 1232.755562] env[68285]: _type = "Task" [ 1232.755562] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.772383] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892335, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.776132] env[68285]: DEBUG oslo_vmware.api [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892336, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.932893] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "refresh_cache-801f524e-28b5-4452-b880-0fc30d3c5eef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1233.020027] env[68285]: DEBUG nova.network.neutron [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Successfully created port: cdb4de32-3a3d-4f10-abb3-9d403cde25c7 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1233.106599] env[68285]: DEBUG nova.compute.manager [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1233.115607] env[68285]: INFO nova.compute.resource_tracker [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating resource usage from migration 0f8618b8-03e2-48c3-886c-a1c9bc490e78 [ 1233.184124] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892332, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.191633] env[68285]: DEBUG nova.compute.manager [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1233.191853] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1233.193045] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.193671] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1233.193671] env[68285]: DEBUG nova.network.neutron [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1233.195128] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1c2688-f841-4b02-a37a-c1f6f818a904 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.204776] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1233.207811] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-723f99c2-a60e-4e92-996b-9a7664415351 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.219492] env[68285]: DEBUG 
oslo_vmware.api [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Waiting for the task: (returnval){ [ 1233.219492] env[68285]: value = "task-2892337" [ 1233.219492] env[68285]: _type = "Task" [ 1233.219492] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.233738] env[68285]: DEBUG oslo_vmware.api [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892337, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.268986] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892335, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092343} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.269372] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1233.270648] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6416ffd1-f7b0-4538-82a2-fc5869921bef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.277667] env[68285]: DEBUG oslo_vmware.api [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892336, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.354993} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.280736] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1233.282041] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1233.282041] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1233.282041] env[68285]: INFO nova.compute.manager [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1233.282271] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1233.284135] env[68285]: DEBUG nova.compute.manager [-] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1233.284135] env[68285]: DEBUG nova.network.neutron [-] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1233.304981] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 8917672f-3b0d-42a1-b8b1-94ac47ce941a/8917672f-3b0d-42a1-b8b1-94ac47ce941a.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1233.311022] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-965d9136-8157-4fbb-ad09-9b2ad145ff6b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.333176] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1233.333176] env[68285]: value = "task-2892338" [ 1233.333176] env[68285]: _type = "Task" [ 1233.333176] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.347883] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892338, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.464639] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e5ce82-766d-4263-b6a1-edac6482b94a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.474442] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baac837e-30de-4568-b41a-b479c33bae2e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.511308] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f09b54-6ebc-44f2-8c16-d504bcbc6043 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.520805] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72266f7f-c1ec-4bcf-82ef-d54ce44c1242 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.536351] env[68285]: DEBUG nova.compute.provider_tree [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1233.626570] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.630296] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquired lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1233.630519] env[68285]: DEBUG nova.network.neutron [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1233.683225] env[68285]: DEBUG oslo_vmware.api [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892332, 'name': PowerOnVM_Task, 'duration_secs': 1.077571} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.683607] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1233.683734] env[68285]: INFO nova.compute.manager [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Took 9.30 seconds to spawn the instance on the hypervisor. [ 1233.683871] env[68285]: DEBUG nova.compute.manager [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1233.684648] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd39bdc-92ef-4b15-ad29-93c019a46388 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.729950] env[68285]: DEBUG oslo_vmware.api [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892337, 'name': PowerOffVM_Task, 'duration_secs': 0.217143} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.730246] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1233.730415] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1233.730664] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf45feae-1e5b-4a8f-b92d-bad55d445f5e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.794901] env[68285]: DEBUG nova.compute.manager [req-775539a1-e1e9-45ac-a676-843fb2fa2c7f req-4da29b8e-8e81-476e-be49-6a1887f777d0 service nova] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Received event network-vif-deleted-ea845a8a-8eb0-4821-aef2-d5e99dd606ec {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1233.795706] env[68285]: INFO nova.compute.manager [req-775539a1-e1e9-45ac-a676-843fb2fa2c7f req-4da29b8e-8e81-476e-be49-6a1887f777d0 service nova] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Neutron deleted interface ea845a8a-8eb0-4821-aef2-d5e99dd606ec; detaching it from the instance and deleting it from the info cache [ 1233.796129] env[68285]: DEBUG nova.network.neutron [req-775539a1-e1e9-45ac-a676-843fb2fa2c7f req-4da29b8e-8e81-476e-be49-6a1887f777d0 service nova] 
[instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1233.806830] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1233.806830] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1233.806830] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Deleting the datastore file [datastore2] 49831327-6e13-412e-ab83-bf350e6e9761 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1233.807173] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5269c083-3fb3-489b-8b06-3a63ef84d32f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.815380] env[68285]: DEBUG oslo_vmware.api [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Waiting for the task: (returnval){ [ 1233.815380] env[68285]: value = "task-2892340" [ 1233.815380] env[68285]: _type = "Task" [ 1233.815380] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.826921] env[68285]: DEBUG oslo_vmware.api [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892340, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.844352] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892338, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.038785] env[68285]: DEBUG nova.network.neutron [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance_info_cache with network_info: [{"id": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "address": "fa:16:3e:71:fc:5c", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbe7fd71-a3", "ovs_interfaceid": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.042217] env[68285]: DEBUG nova.scheduler.client.report [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1234.090223] env[68285]: DEBUG nova.network.neutron [-] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.135244] env[68285]: DEBUG nova.compute.manager [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1234.176027] env[68285]: DEBUG nova.virt.hardware [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1234.176027] env[68285]: DEBUG nova.virt.hardware [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1234.176027] env[68285]: DEBUG nova.virt.hardware [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1234.176027] env[68285]: DEBUG nova.virt.hardware [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1234.176027] env[68285]: DEBUG nova.virt.hardware [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1234.176027] env[68285]: DEBUG nova.virt.hardware [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1234.176521] env[68285]: DEBUG nova.virt.hardware [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1234.176903] env[68285]: DEBUG nova.virt.hardware [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1234.177343] env[68285]: DEBUG 
nova.virt.hardware [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1234.178110] env[68285]: DEBUG nova.virt.hardware [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1234.178110] env[68285]: DEBUG nova.virt.hardware [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1234.179095] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2bae7c7-3530-4fb8-9208-dc72cd2b4463 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.188442] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a446d00-b3ab-4c47-adfa-71e4f5dbb819 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.216681] env[68285]: INFO nova.compute.manager [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Took 22.42 seconds to build instance. [ 1234.302676] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bfc3a59c-78f3-47c7-ba7f-a8a1f6132604 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.314302] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818ada14-dbfa-42f6-ae58-a8f536c2b350 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.363998] env[68285]: DEBUG nova.compute.manager [req-775539a1-e1e9-45ac-a676-843fb2fa2c7f req-4da29b8e-8e81-476e-be49-6a1887f777d0 service nova] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Detach interface failed, port_id=ea845a8a-8eb0-4821-aef2-d5e99dd606ec, reason: Instance 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1234.370604] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892338, 'name': ReconfigVM_Task, 'duration_secs': 0.821892} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.370846] env[68285]: DEBUG oslo_vmware.api [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Task: {'id': task-2892340, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.427208} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.371525] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 8917672f-3b0d-42a1-b8b1-94ac47ce941a/8917672f-3b0d-42a1-b8b1-94ac47ce941a.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1234.372580] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1234.372769] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1234.372930] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1234.373109] env[68285]: INFO nova.compute.manager [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1234.373341] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1234.373550] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-133d0124-6de1-4a2b-959b-a1213fc2c244 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.375532] env[68285]: DEBUG nova.compute.manager [-] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1234.375627] env[68285]: DEBUG nova.network.neutron [-] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1234.384788] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1234.384788] env[68285]: value = "task-2892341" [ 1234.384788] env[68285]: _type = "Task" [ 1234.384788] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.395614] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892341, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.478250] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1234.478799] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f66670d-9542-4b08-ac30-965e1862278c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.487878] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1234.487878] env[68285]: value = "task-2892342" [ 1234.487878] env[68285]: _type = "Task" [ 1234.487878] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.498323] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892342, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.544661] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1234.546499] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.461s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.546725] env[68285]: INFO nova.compute.manager [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Migrating [ 1234.566366] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.148s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.567859] env[68285]: INFO nova.compute.claims [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1234.584044] env[68285]: DEBUG nova.network.neutron [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Updating instance_info_cache with network_info: [{"id": "724df450-925b-47ae-884b-4935b5b95ab2", "address": "fa:16:3e:0f:59:8c", "network": {"id": "19fe9f45-cb71-4a4f-8a94-0020f8d0e8a7", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-693820438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fb202eb50a74c558edb6fdb9dfaf077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap724df450-92", "ovs_interfaceid": "724df450-925b-47ae-884b-4935b5b95ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.592273] env[68285]: INFO nova.compute.manager [-] [instance: 
8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Took 1.31 seconds to deallocate network for instance. [ 1234.721971] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a4b275f-a06d-480d-bd17-8b1929bbf699 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "8a649b1e-d007-4032-a46c-b479365e5289" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.938s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.897073] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892341, 'name': Rename_Task, 'duration_secs': 0.320642} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.898605] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1234.899672] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6772f01c-cd22-40a0-ac42-9bf9ac5c54f2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.909829] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1234.909829] env[68285]: value = "task-2892343" [ 1234.909829] env[68285]: _type = "Task" [ 1234.909829] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.918953] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892343, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.002790] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892342, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.070645] env[68285]: DEBUG nova.compute.manager [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1235.077018] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cdcee8f-2b83-4c69-9367-228005c5fdf4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.086645] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.086823] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.087155] env[68285]: DEBUG nova.network.neutron [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1235.089122] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Releasing lock "refresh_cache-be47df2a-aee7-4275-9acb-9cf74367f503" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.091191] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6beaf2d-d7b6-49c0-99e4-dca3c63e7a44 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.104335] env[68285]: DEBUG oslo_concurrency.lockutils [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.109640] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Resuming the VM {{(pid=68285) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1235.109972] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a50dd979-3c99-48dd-bd1d-cc0e2c400188 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.117218] env[68285]: DEBUG 
nova.network.neutron [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Successfully updated port: cdb4de32-3a3d-4f10-abb3-9d403cde25c7 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1235.129036] env[68285]: DEBUG oslo_vmware.api [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1235.129036] env[68285]: value = "task-2892344" [ 1235.129036] env[68285]: _type = "Task" [ 1235.129036] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.141190] env[68285]: DEBUG oslo_vmware.api [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892344, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.204428] env[68285]: DEBUG nova.compute.manager [req-6e3121c4-2b80-450c-95a3-17f70893cb50 req-e7c00554-8d1c-4c71-8271-12b142db6131 service nova] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Received event network-vif-deleted-d139a9ee-edc0-441e-91e6-f3252990f954 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1235.204621] env[68285]: INFO nova.compute.manager [req-6e3121c4-2b80-450c-95a3-17f70893cb50 req-e7c00554-8d1c-4c71-8271-12b142db6131 service nova] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Neutron deleted interface d139a9ee-edc0-441e-91e6-f3252990f954; detaching it from the instance and deleting it from the info cache [ 1235.204705] env[68285]: DEBUG nova.network.neutron [req-6e3121c4-2b80-450c-95a3-17f70893cb50 req-e7c00554-8d1c-4c71-8271-12b142db6131 service nova] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.381177] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837461e7-099e-4308-b7d0-74d1553637f2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.390900] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6316bb-1dd4-4e63-af0e-729f64c6c3a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.428762] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03652db2-4bad-4e68-857d-e6efdb77c516 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.440594] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af72f908-68cf-4a56-9dc9-59b4d886cf7e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.444524] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892343, 'name': 
PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.455422] env[68285]: DEBUG nova.compute.provider_tree [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1235.499874] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892342, 'name': PowerOffVM_Task, 'duration_secs': 0.534011} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.500446] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1235.501308] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c15f51-e330-4b3d-a5af-5e1e796b2ec6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.525503] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddce3551-cba2-4c07-bd33-3cf038ef78a1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.538112] env[68285]: DEBUG nova.network.neutron [-] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.562638] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1235.562917] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc2e5953-1f10-49f6-a889-ae6b7751db0f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.571167] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1235.571167] env[68285]: value = "task-2892345" [ 1235.571167] env[68285]: _type = "Task" [ 1235.571167] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.580864] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892345, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.620618] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "refresh_cache-b2199b56-64bd-4096-b877-e10656b09313" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.620924] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "refresh_cache-b2199b56-64bd-4096-b877-e10656b09313" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.621499] env[68285]: DEBUG nova.network.neutron [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1235.643267] env[68285]: DEBUG oslo_vmware.api [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892344, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.714228] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd3c7803-f8cd-4696-b29b-3eb85d0298ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.716871] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "a4fc942a-03e7-4415-bd95-f1f0e1344a69" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.717249] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "a4fc942a-03e7-4415-bd95-f1f0e1344a69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.728995] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7f9f2a-4bef-4ac4-b707-35411e027b93 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.773359] env[68285]: DEBUG nova.compute.manager [req-6e3121c4-2b80-450c-95a3-17f70893cb50 req-e7c00554-8d1c-4c71-8271-12b142db6131 service nova] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Detach interface failed, port_id=d139a9ee-edc0-441e-91e6-f3252990f954, reason: Instance 49831327-6e13-412e-ab83-bf350e6e9761 could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1235.835710] env[68285]: DEBUG nova.compute.manager [req-192d67d4-56f7-4b22-a9f4-104002ab8de2 req-e375fdfb-6745-492f-9576-485444eb1c16 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Received event network-vif-plugged-cdb4de32-3a3d-4f10-abb3-9d403cde25c7 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1235.835958] env[68285]: DEBUG oslo_concurrency.lockutils [req-192d67d4-56f7-4b22-a9f4-104002ab8de2 req-e375fdfb-6745-492f-9576-485444eb1c16 service nova] Acquiring lock "b2199b56-64bd-4096-b877-e10656b09313-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.836185] env[68285]: DEBUG oslo_concurrency.lockutils [req-192d67d4-56f7-4b22-a9f4-104002ab8de2 req-e375fdfb-6745-492f-9576-485444eb1c16 service nova] Lock "b2199b56-64bd-4096-b877-e10656b09313-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.836430] env[68285]: DEBUG oslo_concurrency.lockutils [req-192d67d4-56f7-4b22-a9f4-104002ab8de2 req-e375fdfb-6745-492f-9576-485444eb1c16 service nova] Lock "b2199b56-64bd-4096-b877-e10656b09313-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.836514] env[68285]: DEBUG nova.compute.manager [req-192d67d4-56f7-4b22-a9f4-104002ab8de2 req-e375fdfb-6745-492f-9576-485444eb1c16 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] No waiting events found dispatching network-vif-plugged-cdb4de32-3a3d-4f10-abb3-9d403cde25c7 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1235.836679] env[68285]: WARNING nova.compute.manager [req-192d67d4-56f7-4b22-a9f4-104002ab8de2 req-e375fdfb-6745-492f-9576-485444eb1c16 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Received unexpected event network-vif-plugged-cdb4de32-3a3d-4f10-abb3-9d403cde25c7 for instance with vm_state building and task_state spawning. [ 1235.836836] env[68285]: DEBUG nova.compute.manager [req-192d67d4-56f7-4b22-a9f4-104002ab8de2 req-e375fdfb-6745-492f-9576-485444eb1c16 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Received event network-changed-cdb4de32-3a3d-4f10-abb3-9d403cde25c7 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1235.836988] env[68285]: DEBUG nova.compute.manager [req-192d67d4-56f7-4b22-a9f4-104002ab8de2 req-e375fdfb-6745-492f-9576-485444eb1c16 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Refreshing instance network info cache due to event network-changed-cdb4de32-3a3d-4f10-abb3-9d403cde25c7. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1235.837172] env[68285]: DEBUG oslo_concurrency.lockutils [req-192d67d4-56f7-4b22-a9f4-104002ab8de2 req-e375fdfb-6745-492f-9576-485444eb1c16 service nova] Acquiring lock "refresh_cache-b2199b56-64bd-4096-b877-e10656b09313" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.937076] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892343, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.947821] env[68285]: DEBUG nova.network.neutron [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance_info_cache with network_info: [{"id": "10900535-c864-4616-a243-0798b3cdb70a", "address": "fa:16:3e:49:12:6b", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10900535-c8", "ovs_interfaceid": "10900535-c864-4616-a243-0798b3cdb70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.958339] env[68285]: DEBUG nova.scheduler.client.report [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1236.038287] env[68285]: INFO nova.compute.manager [-] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Took 1.66 seconds to deallocate network for instance. 
[ 1236.083392] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1236.083551] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1236.083670] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.083826] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.084026] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1236.084269] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7097cfaf-faf3-4f4d-b6a0-d862c24f4b8a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.097090] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1236.097304] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1236.098387] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6f0c04c-8a45-435b-aff6-005e8bc64b02 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.105074] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1236.105074] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f296d2-0c33-1c22-a99c-d68ada8439a8" [ 1236.105074] env[68285]: _type = "Task" [ 1236.105074] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.111436] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c4ca4d-c3b4-414f-9531-38227d2eac87 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.117051] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f296d2-0c33-1c22-a99c-d68ada8439a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.121508] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Doing hard reboot of VM {{(pid=68285) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1236.121752] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-23d29c23-6104-47f3-9a07-26a9018a0078 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.133275] env[68285]: DEBUG oslo_vmware.api [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1236.133275] env[68285]: value = "task-2892346" [ 1236.133275] env[68285]: _type = "Task" [ 1236.133275] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.141830] env[68285]: DEBUG oslo_vmware.api [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892344, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.147369] env[68285]: DEBUG oslo_vmware.api [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892346, 'name': ResetVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.169765] env[68285]: DEBUG nova.network.neutron [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1236.220768] env[68285]: DEBUG nova.compute.manager [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1236.356867] env[68285]: DEBUG nova.network.neutron [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Updating instance_info_cache with network_info: [{"id": "cdb4de32-3a3d-4f10-abb3-9d403cde25c7", "address": "fa:16:3e:2d:b2:41", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdb4de32-3a", "ovs_interfaceid": "cdb4de32-3a3d-4f10-abb3-9d403cde25c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.437283] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892343, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.452873] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.463666] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.897s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.464283] env[68285]: DEBUG nova.compute.manager [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1236.467109] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.915s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.469229] env[68285]: INFO nova.compute.claims [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1236.546283] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.617952] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f296d2-0c33-1c22-a99c-d68ada8439a8, 'name': SearchDatastore_Task, 'duration_secs': 0.018627} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.618875] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5438edfa-e628-435c-92c8-23d9f311372d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.628469] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1236.628469] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5220046a-4cff-bcf3-b8be-7fbaf94cc3ba" [ 1236.628469] env[68285]: _type = "Task" [ 1236.628469] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.643974] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5220046a-4cff-bcf3-b8be-7fbaf94cc3ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.647081] env[68285]: DEBUG oslo_vmware.api [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892344, 'name': PowerOnVM_Task} progress is 93%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.653506] env[68285]: DEBUG oslo_vmware.api [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892346, 'name': ResetVM_Task, 'duration_secs': 0.128121} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.653847] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Did hard reboot of VM {{(pid=68285) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1236.654083] env[68285]: DEBUG nova.compute.manager [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1236.654987] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ae3c7d-a9c2-464f-8191-89dcc0f88bb3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.753658] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.859473] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "refresh_cache-b2199b56-64bd-4096-b877-e10656b09313" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.860018] env[68285]: DEBUG nova.compute.manager [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Instance network_info: |[{"id": "cdb4de32-3a3d-4f10-abb3-9d403cde25c7", "address": "fa:16:3e:2d:b2:41", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdb4de32-3a", "ovs_interfaceid": "cdb4de32-3a3d-4f10-abb3-9d403cde25c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1236.860208] env[68285]: DEBUG oslo_concurrency.lockutils [req-192d67d4-56f7-4b22-a9f4-104002ab8de2 req-e375fdfb-6745-492f-9576-485444eb1c16 service nova] Acquired lock 
"refresh_cache-b2199b56-64bd-4096-b877-e10656b09313" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.860379] env[68285]: DEBUG nova.network.neutron [req-192d67d4-56f7-4b22-a9f4-104002ab8de2 req-e375fdfb-6745-492f-9576-485444eb1c16 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Refreshing network info cache for port cdb4de32-3a3d-4f10-abb3-9d403cde25c7 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1236.862338] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:b2:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdb4de32-3a3d-4f10-abb3-9d403cde25c7', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1236.870096] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1236.870976] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2199b56-64bd-4096-b877-e10656b09313] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1236.871814] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4ab790d-9d7a-4f82-8c51-c7b1fcdfa551 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.896025] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1236.896025] env[68285]: value = "task-2892347" [ 1236.896025] env[68285]: _type = "Task" [ 1236.896025] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.906344] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892347, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.939030] env[68285]: DEBUG oslo_vmware.api [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892343, 'name': PowerOnVM_Task, 'duration_secs': 1.665725} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.939030] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1236.939165] env[68285]: INFO nova.compute.manager [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Took 10.13 seconds to spawn the instance on the hypervisor. [ 1236.939254] env[68285]: DEBUG nova.compute.manager [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1236.940122] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19cc4fbd-67fd-45e6-bed5-8f6c30f874b2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.974839] env[68285]: DEBUG nova.compute.utils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1236.978992] env[68285]: DEBUG nova.compute.manager [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1236.979243] env[68285]: DEBUG nova.network.neutron [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1237.026948] env[68285]: DEBUG nova.policy [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '557a46b01bbf41e4a343d20c8206aa96', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9352aafac6e049feb8d74a91d1600224', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1237.148313] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5220046a-4cff-bcf3-b8be-7fbaf94cc3ba, 'name': SearchDatastore_Task, 'duration_secs': 0.176873} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.153571] env[68285]: DEBUG oslo_concurrency.lockutils [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.153955] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 801f524e-28b5-4452-b880-0fc30d3c5eef/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk. {{(pid=68285) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1237.154541] env[68285]: DEBUG oslo_vmware.api [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892344, 'name': PowerOnVM_Task, 'duration_secs': 1.573198} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.154899] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ffb72b49-621b-4b81-99f4-494e8508a890 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.157230] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Resumed the VM {{(pid=68285) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1237.158035] env[68285]: DEBUG nova.compute.manager [None req-f7728d57-ce9c-4671-9a30-f3aa00c525fc tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1237.158265] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa456ca-6fe8-445c-aa3c-850053530ec3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.171906] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1237.171906] env[68285]: value = "task-2892348" [ 1237.171906] env[68285]: _type = "Task" [ 1237.171906] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.174716] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b4cd797-0012-46ee-b2c9-311010714fe3 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.516s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.184129] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892348, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.343855] env[68285]: DEBUG nova.network.neutron [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Successfully created port: d00eb1aa-97af-4a18-9582-416989e71604 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1237.407327] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892347, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.459497] env[68285]: INFO nova.compute.manager [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Took 22.52 seconds to build instance. [ 1237.480717] env[68285]: DEBUG nova.compute.manager [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1237.681932] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Acquiring lock "bd3c9b84-794d-4302-bfb2-1181d5ad9552" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.682384] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Lock "bd3c9b84-794d-4302-bfb2-1181d5ad9552" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.705471] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892348, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.816405] env[68285]: DEBUG nova.network.neutron [req-192d67d4-56f7-4b22-a9f4-104002ab8de2 req-e375fdfb-6745-492f-9576-485444eb1c16 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Updated VIF entry in instance network info cache for port cdb4de32-3a3d-4f10-abb3-9d403cde25c7. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1237.816405] env[68285]: DEBUG nova.network.neutron [req-192d67d4-56f7-4b22-a9f4-104002ab8de2 req-e375fdfb-6745-492f-9576-485444eb1c16 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Updating instance_info_cache with network_info: [{"id": "cdb4de32-3a3d-4f10-abb3-9d403cde25c7", "address": "fa:16:3e:2d:b2:41", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdb4de32-3a", "ovs_interfaceid": "cdb4de32-3a3d-4f10-abb3-9d403cde25c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.880652] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd827a4-0a60-4613-8f5d-a2193dc1bf37 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.891071] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a67d748-3ee8-4fe7-a470-52ae46ac9154 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.908473] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892347, 'name': CreateVM_Task, 'duration_secs': 0.514063} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.937068] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2199b56-64bd-4096-b877-e10656b09313] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1237.938129] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.938317] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.938677] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1237.939498] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea8a285-bb08-4c63-87c0-49c135673d7d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.942305] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abdc77e6-9cb7-4ea4-8ebf-981426db3e92 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.951495] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1237.951495] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c7cff9-141b-73de-5d5b-b96ee3c5981c" [ 1237.951495] env[68285]: _type = "Task" [ 1237.951495] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.953056] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f769eae5-0a16-4449-a125-e9749caa470f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.961151] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bccbd674-6b1a-4c97-94eb-96da1920cb47 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.051s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.971276] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c7cff9-141b-73de-5d5b-b96ee3c5981c, 'name': SearchDatastore_Task, 'duration_secs': 0.017833} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.977770] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.978065] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1237.978334] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.978484] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.978688] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1237.982231] env[68285]: DEBUG nova.compute.provider_tree [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 
tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.983603] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d1bf735-fba4-49e4-a92e-c86b5d1b41b4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.986140] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40774498-b377-416b-a551-39ff7151cc7b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.017619] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance '5abddda1-9bf7-4039-81c7-8622f43cc72e' progress to 0 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1238.023266] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1238.023449] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1238.024228] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a804ebe-e30b-45f9-b958-9fb2a94b10f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.031978] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1238.031978] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52fb3fc4-8cbc-7d8d-1a38-120400ed9690" [ 1238.031978] env[68285]: _type = "Task" [ 1238.031978] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.041426] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52fb3fc4-8cbc-7d8d-1a38-120400ed9690, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.191506] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892348, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.195411] env[68285]: DEBUG nova.compute.manager [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1238.321117] env[68285]: DEBUG oslo_concurrency.lockutils [req-192d67d4-56f7-4b22-a9f4-104002ab8de2 req-e375fdfb-6745-492f-9576-485444eb1c16 service nova] Releasing lock "refresh_cache-b2199b56-64bd-4096-b877-e10656b09313" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1238.493072] env[68285]: DEBUG nova.scheduler.client.report [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1238.496841] env[68285]: DEBUG nova.compute.manager [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1238.523585] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1238.524133] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d5b17a3-ccd7-4550-8690-d5f797cd375f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.531298] env[68285]: DEBUG nova.virt.hardware [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1238.531583] env[68285]: DEBUG nova.virt.hardware [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1238.531796] env[68285]: DEBUG nova.virt.hardware [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1238.532018] env[68285]: DEBUG nova.virt.hardware [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1238.532201] env[68285]: DEBUG nova.virt.hardware [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1238.532357] env[68285]: DEBUG nova.virt.hardware [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1238.532584] env[68285]: DEBUG nova.virt.hardware [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 
tempest-DeleteServersTestJSON-1021365396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1238.532742] env[68285]: DEBUG nova.virt.hardware [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1238.532908] env[68285]: DEBUG nova.virt.hardware [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1238.533426] env[68285]: DEBUG nova.virt.hardware [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1238.533426] env[68285]: DEBUG nova.virt.hardware [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1238.534547] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd43d23-3b1b-483b-a110-ce8d63e45c2b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.543980] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1238.543980] env[68285]: value = "task-2892349" [ 1238.543980] env[68285]: _type = "Task" [ 1238.543980] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.554660] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52fb3fc4-8cbc-7d8d-1a38-120400ed9690, 'name': SearchDatastore_Task, 'duration_secs': 0.016009} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.556571] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af6999c-a785-45b0-b399-46bac9651b5f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.564374] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39e4abf6-b6c1-4b33-bd5e-8e1b497d039e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.566766] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892349, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.570440] env[68285]: DEBUG nova.compute.manager [req-2f23e1f7-ac5f-44a1-850a-6d8769fe5a1e req-87bf68ef-57f3-44c4-af66-4aa2fc9d3fb8 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Received event network-changed-1f6dacae-76ce-408a-8e61-deddf144ba68 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1238.570721] env[68285]: DEBUG nova.compute.manager [req-2f23e1f7-ac5f-44a1-850a-6d8769fe5a1e req-87bf68ef-57f3-44c4-af66-4aa2fc9d3fb8 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Refreshing instance network info cache due to event network-changed-1f6dacae-76ce-408a-8e61-deddf144ba68. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1238.571013] env[68285]: DEBUG oslo_concurrency.lockutils [req-2f23e1f7-ac5f-44a1-850a-6d8769fe5a1e req-87bf68ef-57f3-44c4-af66-4aa2fc9d3fb8 service nova] Acquiring lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.571169] env[68285]: DEBUG oslo_concurrency.lockutils [req-2f23e1f7-ac5f-44a1-850a-6d8769fe5a1e req-87bf68ef-57f3-44c4-af66-4aa2fc9d3fb8 service nova] Acquired lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1238.571329] env[68285]: DEBUG nova.network.neutron [req-2f23e1f7-ac5f-44a1-850a-6d8769fe5a1e req-87bf68ef-57f3-44c4-af66-4aa2fc9d3fb8 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Refreshing network info cache for port 1f6dacae-76ce-408a-8e61-deddf144ba68 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1238.584811] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1238.584811] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529c8ec3-1baf-3795-91b9-15d1eb118a33" [ 1238.584811] env[68285]: _type = "Task" [ 1238.584811] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.598310] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529c8ec3-1baf-3795-91b9-15d1eb118a33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.696829] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892348, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.725112] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1238.931181] env[68285]: DEBUG nova.network.neutron [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Successfully updated port: d00eb1aa-97af-4a18-9582-416989e71604 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1238.998096] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.530s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.998096] env[68285]: DEBUG nova.compute.manager [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1239.003330] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.392s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1239.003625] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.006606] env[68285]: DEBUG oslo_concurrency.lockutils [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.903s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1239.007094] env[68285]: DEBUG nova.objects.instance [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lazy-loading 'resources' on Instance uuid 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.034490] env[68285]: INFO nova.scheduler.client.report [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleted allocations for instance fe8e0a71-e9b0-4035-a696-51455d6fc473 [ 1239.055987] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892349, 'name': PowerOffVM_Task, 'duration_secs': 0.237058} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.056328] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1239.056512] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance '5abddda1-9bf7-4039-81c7-8622f43cc72e' progress to 17 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1239.102219] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529c8ec3-1baf-3795-91b9-15d1eb118a33, 'name': SearchDatastore_Task, 'duration_secs': 0.037024} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.102503] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.102746] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] b2199b56-64bd-4096-b877-e10656b09313/b2199b56-64bd-4096-b877-e10656b09313.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1239.103359] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0fc22d9-9e8e-42b8-a994-82d9a4532dec {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.112855] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1239.112855] env[68285]: value = "task-2892350" [ 1239.112855] env[68285]: _type = "Task" [ 1239.112855] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.122127] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892350, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.193800] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892348, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.709651} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.194105] env[68285]: INFO nova.virt.vmwareapi.ds_util [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 801f524e-28b5-4452-b880-0fc30d3c5eef/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk. [ 1239.194914] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d21534-9f8c-4b1f-b7f9-c1a4f4cccdb8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.221246] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 801f524e-28b5-4452-b880-0fc30d3c5eef/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1239.224045] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a7c3745-aed6-47e7-b833-469c9a0fc4ef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.245291] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1239.245291] env[68285]: value = "task-2892351" [ 1239.245291] env[68285]: _type = "Task" [ 1239.245291] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.255227] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892351, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.366050] env[68285]: DEBUG nova.network.neutron [req-2f23e1f7-ac5f-44a1-850a-6d8769fe5a1e req-87bf68ef-57f3-44c4-af66-4aa2fc9d3fb8 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Updated VIF entry in instance network info cache for port 1f6dacae-76ce-408a-8e61-deddf144ba68. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1239.366565] env[68285]: DEBUG nova.network.neutron [req-2f23e1f7-ac5f-44a1-850a-6d8769fe5a1e req-87bf68ef-57f3-44c4-af66-4aa2fc9d3fb8 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Updating instance_info_cache with network_info: [{"id": "1f6dacae-76ce-408a-8e61-deddf144ba68", "address": "fa:16:3e:f2:71:1b", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f6dacae-76", "ovs_interfaceid": "1f6dacae-76ce-408a-8e61-deddf144ba68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.434320] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.434506] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.434859] env[68285]: DEBUG nova.network.neutron [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1239.513856] env[68285]: DEBUG nova.compute.utils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1239.515794] env[68285]: DEBUG nova.compute.manager [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1239.515993] env[68285]: DEBUG nova.network.neutron [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1239.542403] env[68285]: DEBUG oslo_concurrency.lockutils [None req-bf296da8-0f7f-48a5-9e56-ed00e3f04c67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "fe8e0a71-e9b0-4035-a696-51455d6fc473" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.612s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.563885] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1239.563885] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1239.563885] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1239.564113] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1239.564227] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1239.564380] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1239.564585] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 
tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1239.564782] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1239.564966] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1239.565147] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1239.565325] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1239.574793] env[68285]: DEBUG nova.policy [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64faebf5ce1549fe938f12248656d8d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2727048b316143c7bfa2aef4f9b264f2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1239.576921] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c22403b-bf35-42df-9d52-30a72ff84393 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.601065] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1239.601065] env[68285]: value = "task-2892352" [ 1239.601065] env[68285]: _type = "Task" [ 1239.601065] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.616450] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892352, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.630387] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892350, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.757130] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892351, 'name': ReconfigVM_Task, 'duration_secs': 0.406655} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.757515] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 801f524e-28b5-4452-b880-0fc30d3c5eef/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1239.758682] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176c1d08-bf04-4489-abbe-2b14c0e55984 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.788944] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d690dc7a-df31-402f-8dda-1718fb365d2e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.807196] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1239.807196] env[68285]: value = "task-2892353" [ 1239.807196] env[68285]: _type = "Task" [ 1239.807196] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.832164] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892353, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.861661] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b84e1df-8115-4faa-a04a-c05ca1ba1889 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.870210] env[68285]: DEBUG oslo_concurrency.lockutils [req-2f23e1f7-ac5f-44a1-850a-6d8769fe5a1e req-87bf68ef-57f3-44c4-af66-4aa2fc9d3fb8 service nova] Releasing lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.871094] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c783b061-a68b-4363-8aae-d86e402e6a33 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.903912] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55b2fa1-e5c1-4b81-8757-7642726fc254 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.912605] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0725cbf6-311a-4143-9744-d7d0bbee97ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.918705] env[68285]: DEBUG nova.network.neutron [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Successfully created port: 4b0deb3b-eaad-4d59-befc-a8a795b1472f {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1239.931177] env[68285]: DEBUG nova.compute.provider_tree [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1239.980709] env[68285]: DEBUG nova.network.neutron [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1240.019417] env[68285]: DEBUG nova.compute.manager [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1240.113148] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892352, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.127170] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892350, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.587361} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.127573] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] b2199b56-64bd-4096-b877-e10656b09313/b2199b56-64bd-4096-b877-e10656b09313.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1240.128063] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1240.128427] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a75c1796-0711-4bd1-bf90-7534e6d9f01a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.136690] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1240.136690] env[68285]: value = "task-2892354" [ 1240.136690] env[68285]: _type = "Task" [ 1240.136690] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.151153] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892354, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.255316] env[68285]: DEBUG nova.network.neutron [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating instance_info_cache with network_info: [{"id": "d00eb1aa-97af-4a18-9582-416989e71604", "address": "fa:16:3e:0f:89:60", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd00eb1aa-97", "ovs_interfaceid": "d00eb1aa-97af-4a18-9582-416989e71604", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.318114] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892353, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.453318] env[68285]: ERROR nova.scheduler.client.report [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [req-d26cbf2b-b7e3-48c9-8716-152a23444450] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d26cbf2b-b7e3-48c9-8716-152a23444450"}]} [ 1240.473335] env[68285]: DEBUG nova.scheduler.client.report [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1240.487701] env[68285]: DEBUG nova.scheduler.client.report [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1240.487948] env[68285]: DEBUG nova.compute.provider_tree [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1240.499834] env[68285]: DEBUG nova.scheduler.client.report [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1240.519195] env[68285]: DEBUG nova.scheduler.client.report [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1240.599916] env[68285]: DEBUG nova.compute.manager [req-a98f8260-6214-4f19-844f-0ba514a86a2b req-92a677ad-81b3-4b39-9fd2-e48287b5d082 service nova] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Received event network-vif-plugged-d00eb1aa-97af-4a18-9582-416989e71604 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1240.600155] env[68285]: DEBUG oslo_concurrency.lockutils 
[req-a98f8260-6214-4f19-844f-0ba514a86a2b req-92a677ad-81b3-4b39-9fd2-e48287b5d082 service nova] Acquiring lock "d4818c98-8134-4426-bd35-b2339ed6abd4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.600361] env[68285]: DEBUG oslo_concurrency.lockutils [req-a98f8260-6214-4f19-844f-0ba514a86a2b req-92a677ad-81b3-4b39-9fd2-e48287b5d082 service nova] Lock "d4818c98-8134-4426-bd35-b2339ed6abd4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.600529] env[68285]: DEBUG oslo_concurrency.lockutils [req-a98f8260-6214-4f19-844f-0ba514a86a2b req-92a677ad-81b3-4b39-9fd2-e48287b5d082 service nova] Lock "d4818c98-8134-4426-bd35-b2339ed6abd4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.600695] env[68285]: DEBUG nova.compute.manager [req-a98f8260-6214-4f19-844f-0ba514a86a2b req-92a677ad-81b3-4b39-9fd2-e48287b5d082 service nova] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] No waiting events found dispatching network-vif-plugged-d00eb1aa-97af-4a18-9582-416989e71604 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1240.600859] env[68285]: WARNING nova.compute.manager [req-a98f8260-6214-4f19-844f-0ba514a86a2b req-92a677ad-81b3-4b39-9fd2-e48287b5d082 service nova] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Received unexpected event network-vif-plugged-d00eb1aa-97af-4a18-9582-416989e71604 for instance with vm_state building and task_state spawning. [ 1240.601027] env[68285]: DEBUG nova.compute.manager [req-a98f8260-6214-4f19-844f-0ba514a86a2b req-92a677ad-81b3-4b39-9fd2-e48287b5d082 service nova] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Received event network-changed-d00eb1aa-97af-4a18-9582-416989e71604 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1240.601190] env[68285]: DEBUG nova.compute.manager [req-a98f8260-6214-4f19-844f-0ba514a86a2b req-92a677ad-81b3-4b39-9fd2-e48287b5d082 service nova] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Refreshing instance network info cache due to event network-changed-d00eb1aa-97af-4a18-9582-416989e71604. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1240.601355] env[68285]: DEBUG oslo_concurrency.lockutils [req-a98f8260-6214-4f19-844f-0ba514a86a2b req-92a677ad-81b3-4b39-9fd2-e48287b5d082 service nova] Acquiring lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.612831] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892352, 'name': ReconfigVM_Task, 'duration_secs': 0.582866} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.615310] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance '5abddda1-9bf7-4039-81c7-8622f43cc72e' progress to 33 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1240.648784] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892354, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.161654} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.649154] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1240.649996] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41bedac3-f4f5-4785-b066-736449ff614e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.673418] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] b2199b56-64bd-4096-b877-e10656b09313/b2199b56-64bd-4096-b877-e10656b09313.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1240.677028] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15eaba86-3084-436f-b033-107a92097956 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.698396] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1240.698396] env[68285]: value = "task-2892355" [ 1240.698396] env[68285]: _type = "Task" [ 1240.698396] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.712090] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892355, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.761782] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.762079] env[68285]: DEBUG nova.compute.manager [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Instance network_info: |[{"id": "d00eb1aa-97af-4a18-9582-416989e71604", "address": "fa:16:3e:0f:89:60", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd00eb1aa-97", "ovs_interfaceid": "d00eb1aa-97af-4a18-9582-416989e71604", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1240.763048] env[68285]: DEBUG oslo_concurrency.lockutils [req-a98f8260-6214-4f19-844f-0ba514a86a2b req-92a677ad-81b3-4b39-9fd2-e48287b5d082 service nova] Acquired lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1240.763048] env[68285]: DEBUG nova.network.neutron [req-a98f8260-6214-4f19-844f-0ba514a86a2b req-92a677ad-81b3-4b39-9fd2-e48287b5d082 service nova] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Refreshing network info cache for port d00eb1aa-97af-4a18-9582-416989e71604 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1240.763942] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:89:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd00eb1aa-97af-4a18-9582-416989e71604', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1240.771159] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 
tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1240.771718] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1240.774662] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7eb6698b-998d-4c86-8ab8-02098b3e852a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.796543] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1240.796543] env[68285]: value = "task-2892356" [ 1240.796543] env[68285]: _type = "Task" [ 1240.796543] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.807569] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892356, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.821805] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892353, 'name': ReconfigVM_Task, 'duration_secs': 0.566941} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.822257] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1240.822360] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-157677d1-968e-47f9-9955-4a9c0efaf21e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.827632] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80169bee-6d13-419f-a702-16461f06019a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.831490] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1240.831490] env[68285]: value = "task-2892357" [ 1240.831490] env[68285]: _type = "Task" [ 1240.831490] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.840701] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd312999-6b0d-4a3e-9e71-10c1d10c3492 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.844047] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892357, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.873039] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca47a7b1-d892-4786-9040-e04a597baead {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.880547] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b29db0-f8c4-48c1-89eb-fdc2b8069eed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.895199] env[68285]: DEBUG nova.compute.provider_tree [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1241.028853] env[68285]: DEBUG nova.compute.manager [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1241.054880] env[68285]: DEBUG nova.virt.hardware [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1241.055150] env[68285]: DEBUG nova.virt.hardware [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1241.055309] env[68285]: DEBUG nova.virt.hardware [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1241.055485] env[68285]: DEBUG nova.virt.hardware [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1241.055629] env[68285]: DEBUG nova.virt.hardware [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1241.055771] env[68285]: DEBUG nova.virt.hardware [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1241.056010] env[68285]: DEBUG nova.virt.hardware [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1241.056185] env[68285]: DEBUG nova.virt.hardware [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1241.056351] 
env[68285]: DEBUG nova.virt.hardware [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1241.056508] env[68285]: DEBUG nova.virt.hardware [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1241.056676] env[68285]: DEBUG nova.virt.hardware [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1241.057512] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2077e03f-a511-48a1-a1ca-38d1575a9ce5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.065696] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce20dc5-7e37-4cf2-ba64-5076560b150d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.121251] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1241.121685] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1241.121939] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1241.122172] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1241.122335] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 
tempest-ServerActionsTestOtherB-158934431-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1241.122545] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1241.122770] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1241.122959] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1241.123217] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1241.123398] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1241.123586] env[68285]: DEBUG nova.virt.hardware [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1241.129044] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Reconfiguring VM instance instance-00000059 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1241.129736] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfb7ee4f-8f6a-4302-a230-bbbb18e166d4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.151338] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1241.151338] env[68285]: value = "task-2892358" [ 1241.151338] env[68285]: _type = "Task" [ 1241.151338] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.160434] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892358, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.208765] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892355, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.307779] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892356, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.344017] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892357, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.434016] env[68285]: DEBUG nova.scheduler.client.report [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 147 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1241.434567] env[68285]: DEBUG nova.compute.provider_tree [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 147 to 148 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1241.434785] env[68285]: DEBUG nova.compute.provider_tree [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1241.485020] env[68285]: DEBUG nova.network.neutron [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 
tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Successfully updated port: 4b0deb3b-eaad-4d59-befc-a8a795b1472f {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1241.643064] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "68aee959-4168-43a7-a8d1-e6e126a52da5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.643314] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "68aee959-4168-43a7-a8d1-e6e126a52da5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.655838] env[68285]: DEBUG nova.network.neutron [req-a98f8260-6214-4f19-844f-0ba514a86a2b req-92a677ad-81b3-4b39-9fd2-e48287b5d082 service nova] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updated VIF entry in instance network info cache for port d00eb1aa-97af-4a18-9582-416989e71604. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1241.657095] env[68285]: DEBUG nova.network.neutron [req-a98f8260-6214-4f19-844f-0ba514a86a2b req-92a677ad-81b3-4b39-9fd2-e48287b5d082 service nova] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating instance_info_cache with network_info: [{"id": "d00eb1aa-97af-4a18-9582-416989e71604", "address": "fa:16:3e:0f:89:60", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd00eb1aa-97", "ovs_interfaceid": "d00eb1aa-97af-4a18-9582-416989e71604", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.668904] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892358, 'name': ReconfigVM_Task, 'duration_secs': 0.248569} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.669357] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Reconfigured VM instance instance-00000059 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1241.670008] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d01c6de0-40f6-42bd-90c2-c8797e555bf9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.695203] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 5abddda1-9bf7-4039-81c7-8622f43cc72e/5abddda1-9bf7-4039-81c7-8622f43cc72e.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1241.695671] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2aa88cac-329a-4e68-877c-dff3d44a033e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.719027] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892355, 'name': ReconfigVM_Task, 'duration_secs': 0.783824} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.720302] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Reconfigured VM instance instance-00000067 to attach disk [datastore1] b2199b56-64bd-4096-b877-e10656b09313/b2199b56-64bd-4096-b877-e10656b09313.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1241.721267] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1241.721267] env[68285]: value = "task-2892359" [ 1241.721267] env[68285]: _type = "Task" [ 1241.721267] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.721484] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b8496e3-610c-4224-9697-be8863d5e4b5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.731825] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892359, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.733064] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1241.733064] env[68285]: value = "task-2892360" [ 1241.733064] env[68285]: _type = "Task" [ 1241.733064] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.740888] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892360, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.807696] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892356, 'name': CreateVM_Task, 'duration_secs': 0.822074} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.807854] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1241.808570] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.808743] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1241.809142] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1241.809417] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c68a209-70d1-4d98-a864-747914c12b65 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.815048] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1241.815048] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5261ef61-6ce3-d610-6077-e2d50b079f58" [ 1241.815048] env[68285]: _type = "Task" [ 1241.815048] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.822645] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5261ef61-6ce3-d610-6077-e2d50b079f58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.841012] env[68285]: DEBUG oslo_vmware.api [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892357, 'name': PowerOnVM_Task, 'duration_secs': 0.686651} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.841278] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1241.844017] env[68285]: DEBUG nova.compute.manager [None req-70da0f1a-2fb9-4696-b3e9-2284b37f7e5f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1241.844730] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f22ed8-ea3c-4479-ac84-ea04a7ebf5bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.940062] env[68285]: DEBUG oslo_concurrency.lockutils [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.933s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.942620] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.397s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.942918] env[68285]: DEBUG nova.objects.instance [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Lazy-loading 'resources' on Instance uuid 49831327-6e13-412e-ab83-bf350e6e9761 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1241.963753] env[68285]: INFO nova.scheduler.client.report [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Deleted allocations for instance 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc [ 1241.984936] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 
tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "refresh_cache-5c0a91a4-b247-4950-8c7c-c62afdc4860f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.985208] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "refresh_cache-5c0a91a4-b247-4950-8c7c-c62afdc4860f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1241.985299] env[68285]: DEBUG nova.network.neutron [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1242.146263] env[68285]: DEBUG nova.compute.utils [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1242.162266] env[68285]: DEBUG oslo_concurrency.lockutils [req-a98f8260-6214-4f19-844f-0ba514a86a2b req-92a677ad-81b3-4b39-9fd2-e48287b5d082 service nova] Releasing lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.234724] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892359, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.243493] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892360, 'name': Rename_Task, 'duration_secs': 0.215866} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.243493] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1242.243691] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0dc6b15-95fc-4f09-ac78-3b6f9e4da379 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.250746] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1242.250746] env[68285]: value = "task-2892361" [ 1242.250746] env[68285]: _type = "Task" [ 1242.250746] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.260468] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892361, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.328501] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5261ef61-6ce3-d610-6077-e2d50b079f58, 'name': SearchDatastore_Task, 'duration_secs': 0.023438} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.328896] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.329135] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1242.329441] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.329656] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.329906] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1242.330209] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-042971a3-813b-4fa6-a8ff-c697f87c2c4b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.341642] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1242.341915] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1242.342742] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0a8f18a-9be9-40a5-b5cf-a7c0b150a7e2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.349222] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1242.349222] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52398891-42eb-923f-4a86-5139675c8e0e" [ 1242.349222] env[68285]: _type = "Task" [ 1242.349222] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.364148] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52398891-42eb-923f-4a86-5139675c8e0e, 'name': SearchDatastore_Task, 'duration_secs': 0.012261} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.365065] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ac55512-64c7-4853-b309-8ec192248631 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.371978] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1242.371978] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527066e5-5a0e-352c-49e4-815d38a52bd6" [ 1242.371978] env[68285]: _type = "Task" [ 1242.371978] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.382694] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527066e5-5a0e-352c-49e4-815d38a52bd6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.475379] env[68285]: DEBUG oslo_concurrency.lockutils [None req-87f576d0-5062-4b51-9076-c50342184e4d tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "8fd23cb4-45da-4bd9-a258-845eb3f6a1dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.904s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.547091] env[68285]: DEBUG nova.network.neutron [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1242.649145] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "68aee959-4168-43a7-a8d1-e6e126a52da5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.739160] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892359, 'name': ReconfigVM_Task, 'duration_secs': 0.721578} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.743435] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 5abddda1-9bf7-4039-81c7-8622f43cc72e/5abddda1-9bf7-4039-81c7-8622f43cc72e.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1242.743932] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance '5abddda1-9bf7-4039-81c7-8622f43cc72e' progress to 50 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1242.760639] env[68285]: DEBUG nova.network.neutron [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Updating instance_info_cache with network_info: [{"id": "4b0deb3b-eaad-4d59-befc-a8a795b1472f", "address": "fa:16:3e:af:a6:f8", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b0deb3b-ea", "ovs_interfaceid": "4b0deb3b-eaad-4d59-befc-a8a795b1472f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1242.762244] env[68285]: DEBUG oslo_concurrency.lockutils [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "589d1560-9269-4de2-bd79-454ebdaa40d4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.762560] env[68285]: DEBUG oslo_concurrency.lockutils [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "589d1560-9269-4de2-bd79-454ebdaa40d4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.762806] env[68285]: DEBUG oslo_concurrency.lockutils [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "589d1560-9269-4de2-bd79-454ebdaa40d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.763034] env[68285]: DEBUG oslo_concurrency.lockutils [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "589d1560-9269-4de2-bd79-454ebdaa40d4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.763232] env[68285]: DEBUG oslo_concurrency.lockutils [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "589d1560-9269-4de2-bd79-454ebdaa40d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.772313] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e08c9a-03b2-4a13-b27b-b4cbbee3866e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.775968] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 
tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892361, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.776673] env[68285]: INFO nova.compute.manager [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Terminating instance [ 1242.787058] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4487d089-ea50-468b-83f2-ed86b281b5d2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.825853] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4187a3c-8dc7-43a0-9857-a2507f8ecdba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.841641] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781c9476-fb18-403e-b4f0-b6903709f3f5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.860192] env[68285]: DEBUG nova.compute.provider_tree [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1242.888032] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527066e5-5a0e-352c-49e4-815d38a52bd6, 'name': SearchDatastore_Task, 'duration_secs': 0.016965} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.888032] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.888032] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] d4818c98-8134-4426-bd35-b2339ed6abd4/d4818c98-8134-4426-bd35-b2339ed6abd4.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1242.888869] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-879aacd6-4c01-4fa9-ac35-ae7b7aad4ee2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.900158] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1242.900158] env[68285]: value = "task-2892362" [ 1242.900158] env[68285]: _type = "Task" [ 1242.900158] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.914667] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892362, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.072164] env[68285]: DEBUG nova.compute.manager [req-91ea41c7-a2b8-4078-ae49-c9f3672684af req-214874c7-e95c-40cf-98f8-b0ec92316e87 service nova] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Received event network-vif-plugged-4b0deb3b-eaad-4d59-befc-a8a795b1472f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1243.072403] env[68285]: DEBUG oslo_concurrency.lockutils [req-91ea41c7-a2b8-4078-ae49-c9f3672684af req-214874c7-e95c-40cf-98f8-b0ec92316e87 service nova] Acquiring lock "5c0a91a4-b247-4950-8c7c-c62afdc4860f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.072611] env[68285]: DEBUG oslo_concurrency.lockutils [req-91ea41c7-a2b8-4078-ae49-c9f3672684af req-214874c7-e95c-40cf-98f8-b0ec92316e87 service nova] Lock "5c0a91a4-b247-4950-8c7c-c62afdc4860f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.072821] env[68285]: DEBUG oslo_concurrency.lockutils [req-91ea41c7-a2b8-4078-ae49-c9f3672684af req-214874c7-e95c-40cf-98f8-b0ec92316e87 service nova] Lock "5c0a91a4-b247-4950-8c7c-c62afdc4860f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.072940] env[68285]: DEBUG nova.compute.manager [req-91ea41c7-a2b8-4078-ae49-c9f3672684af req-214874c7-e95c-40cf-98f8-b0ec92316e87 service nova] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] No waiting events found dispatching network-vif-plugged-4b0deb3b-eaad-4d59-befc-a8a795b1472f {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1243.073121] env[68285]: WARNING nova.compute.manager [req-91ea41c7-a2b8-4078-ae49-c9f3672684af req-214874c7-e95c-40cf-98f8-b0ec92316e87 service nova] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Received unexpected event network-vif-plugged-4b0deb3b-eaad-4d59-befc-a8a795b1472f for instance with vm_state building and task_state spawning. [ 1243.073339] env[68285]: DEBUG nova.compute.manager [req-91ea41c7-a2b8-4078-ae49-c9f3672684af req-214874c7-e95c-40cf-98f8-b0ec92316e87 service nova] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Received event network-changed-4b0deb3b-eaad-4d59-befc-a8a795b1472f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1243.073426] env[68285]: DEBUG nova.compute.manager [req-91ea41c7-a2b8-4078-ae49-c9f3672684af req-214874c7-e95c-40cf-98f8-b0ec92316e87 service nova] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Refreshing instance network info cache due to event network-changed-4b0deb3b-eaad-4d59-befc-a8a795b1472f. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1243.073590] env[68285]: DEBUG oslo_concurrency.lockutils [req-91ea41c7-a2b8-4078-ae49-c9f3672684af req-214874c7-e95c-40cf-98f8-b0ec92316e87 service nova] Acquiring lock "refresh_cache-5c0a91a4-b247-4950-8c7c-c62afdc4860f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.254349] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f3a459-68cf-41f9-bd60-71a54d3eeabf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.267827] env[68285]: DEBUG oslo_vmware.api [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892361, 'name': PowerOnVM_Task, 'duration_secs': 0.870542} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.283944] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1243.284279] env[68285]: INFO nova.compute.manager [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Took 9.15 seconds to spawn the instance on the hypervisor. [ 1243.284511] env[68285]: DEBUG nova.compute.manager [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1243.285384] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "refresh_cache-5c0a91a4-b247-4950-8c7c-c62afdc4860f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.285745] env[68285]: DEBUG nova.compute.manager [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Instance network_info: |[{"id": "4b0deb3b-eaad-4d59-befc-a8a795b1472f", "address": "fa:16:3e:af:a6:f8", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b0deb3b-ea", "ovs_interfaceid": "4b0deb3b-eaad-4d59-befc-a8a795b1472f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1243.287040] env[68285]: DEBUG nova.compute.manager [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1243.287040] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1243.287429] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f5d4bc-99fa-44e9-aa80-2119343d708d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.290153] env[68285]: DEBUG oslo_concurrency.lockutils [req-91ea41c7-a2b8-4078-ae49-c9f3672684af req-214874c7-e95c-40cf-98f8-b0ec92316e87 service nova] Acquired lock "refresh_cache-5c0a91a4-b247-4950-8c7c-c62afdc4860f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1243.290335] env[68285]: DEBUG nova.network.neutron [req-91ea41c7-a2b8-4078-ae49-c9f3672684af req-214874c7-e95c-40cf-98f8-b0ec92316e87 service nova] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Refreshing network info cache for port 4b0deb3b-eaad-4d59-befc-a8a795b1472f {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1243.291727] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:a6:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '82dbbfe2-640b-433f-a8e9-1566bd40fb34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b0deb3b-eaad-4d59-befc-a8a795b1472f', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1243.299114] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1243.300281] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74493758-4042-449f-9445-3d463f128798 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.303961] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001a19c6-4238-4c0a-9a16-3f8af881e15d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.307727] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1243.308417] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4d37c17-8d7f-4b8f-b61e-03b70ccc968a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.352165] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1243.352675] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "be47df2a-aee7-4275-9acb-9cf74367f503" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.352958] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "be47df2a-aee7-4275-9acb-9cf74367f503" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.353200] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "be47df2a-aee7-4275-9acb-9cf74367f503-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.353398] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "be47df2a-aee7-4275-9acb-9cf74367f503-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.353583] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 
tempest-ServersNegativeTestJSON-639600364-project-member] Lock "be47df2a-aee7-4275-9acb-9cf74367f503-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.355989] env[68285]: INFO nova.compute.manager [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Terminating instance [ 1243.359171] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance '5abddda1-9bf7-4039-81c7-8622f43cc72e' progress to 67 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1243.365379] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c0c6748-ab60-46e1-9439-5c348f97079b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.367963] env[68285]: DEBUG nova.scheduler.client.report [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1243.371961] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1243.371961] env[68285]: value = "task-2892363" [ 1243.371961] env[68285]: _type = "Task" [ 1243.371961] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.381919] env[68285]: DEBUG oslo_vmware.api [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1243.381919] env[68285]: value = "task-2892364" [ 1243.381919] env[68285]: _type = "Task" [ 1243.381919] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.385604] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892363, 'name': CreateVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.396899] env[68285]: DEBUG oslo_vmware.api [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892364, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.413528] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892362, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.727262] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "68aee959-4168-43a7-a8d1-e6e126a52da5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.728068] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "68aee959-4168-43a7-a8d1-e6e126a52da5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.728068] env[68285]: INFO nova.compute.manager [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Attaching volume 9bd4b699-6f0e-4337-bda4-c670e0842b41 to /dev/sdb [ 1243.781114] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f6f6cc-7263-4bbb-b57a-bb64b40538c0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.789034] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b307a92-5d1e-495b-a76c-3bb8d9a8e9ac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.806336] env[68285]: DEBUG nova.virt.block_device [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Updating existing volume attachment record: 1feaeacc-85a7-420e-8d2d-a0023a7f551e {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1243.870804] env[68285]: DEBUG nova.compute.manager [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1243.871100] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1243.880792] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c41f6fa-7c53-4c0c-bfc4-3ff11c9bcec3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.899821] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.957s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.903339] env[68285]: INFO nova.compute.manager [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Took 23.31 seconds to build instance. [ 1243.904494] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.151s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.906026] env[68285]: INFO nova.compute.claims [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1243.934025] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1243.934025] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e2ae733-0b67-4a04-bf4f-778b5178b35e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.947402] env[68285]: DEBUG oslo_vmware.api [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892364, 'name': PowerOffVM_Task, 'duration_secs': 0.377463} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.947613] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892363, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.947980] env[68285]: INFO nova.compute.manager [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Unrescuing [ 1243.948266] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "refresh_cache-801f524e-28b5-4452-b880-0fc30d3c5eef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.948404] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "refresh_cache-801f524e-28b5-4452-b880-0fc30d3c5eef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1243.948920] env[68285]: DEBUG nova.network.neutron [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1243.950417] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892362, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.951364] env[68285]: INFO nova.scheduler.client.report [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Deleted allocations for instance 49831327-6e13-412e-ab83-bf350e6e9761 [ 1243.952865] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1243.953068] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1243.955978] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b97d3e12-8e59-43b0-abd8-77f3c5ca8a4c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.961710] env[68285]: DEBUG oslo_vmware.api [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1243.961710] env[68285]: value = "task-2892365" [ 1243.961710] env[68285]: _type = "Task" [ 1243.961710] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.979019] env[68285]: DEBUG oslo_vmware.api [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892365, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.083196] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1244.083498] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1244.083685] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Deleting the datastore file [datastore1] 589d1560-9269-4de2-bd79-454ebdaa40d4 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1244.084033] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5025b2c-f8eb-4720-9aa0-29e6077bf978 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.095781] env[68285]: DEBUG oslo_vmware.api [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for the task: (returnval){ [ 1244.095781] env[68285]: value = "task-2892368" [ 1244.095781] env[68285]: _type = "Task" [ 1244.095781] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.104360] env[68285]: DEBUG oslo_vmware.api [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892368, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.309030] env[68285]: DEBUG nova.network.neutron [req-91ea41c7-a2b8-4078-ae49-c9f3672684af req-214874c7-e95c-40cf-98f8-b0ec92316e87 service nova] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Updated VIF entry in instance network info cache for port 4b0deb3b-eaad-4d59-befc-a8a795b1472f. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1244.309652] env[68285]: DEBUG nova.network.neutron [req-91ea41c7-a2b8-4078-ae49-c9f3672684af req-214874c7-e95c-40cf-98f8-b0ec92316e87 service nova] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Updating instance_info_cache with network_info: [{"id": "4b0deb3b-eaad-4d59-befc-a8a795b1472f", "address": "fa:16:3e:af:a6:f8", "network": {"id": "d8352756-7d82-4904-8754-34837bd7272f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1740810441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2727048b316143c7bfa2aef4f9b264f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82dbbfe2-640b-433f-a8e9-1566bd40fb34", "external-id": "nsx-vlan-transportzone-625", "segmentation_id": 625, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b0deb3b-ea", "ovs_interfaceid": "4b0deb3b-eaad-4d59-befc-a8a795b1472f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.416665] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892363, 'name': CreateVM_Task, 'duration_secs': 0.725099} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.416665] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1244.417350] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.417525] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.417914] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1244.418210] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2165ee5-f49b-474e-8c44-41d9c9ade77b {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.426158] env[68285]: DEBUG oslo_concurrency.lockutils [None req-498dd05e-368e-4a48-8c18-fe6c1e2766c6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "b2199b56-64bd-4096-b877-e10656b09313" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.845s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.426375] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892362, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.430737] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1244.430737] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e09b31-a61e-5cc2-2f6c-b3d5279539d3" [ 1244.430737] env[68285]: _type = "Task" [ 1244.430737] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.440021] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e09b31-a61e-5cc2-2f6c-b3d5279539d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.461318] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8971e865-2cfd-4be3-b27d-16a0b4b94941 tempest-ServerAddressesTestJSON-488695317 tempest-ServerAddressesTestJSON-488695317-project-member] Lock "49831327-6e13-412e-ab83-bf350e6e9761" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.776s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.469665] env[68285]: DEBUG nova.compute.manager [req-03fece69-dfe8-4543-9de1-8fe6dff029eb req-4160c83e-5735-4bbb-8118-ee53c3d2bcf9 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Received event network-changed-cdb4de32-3a3d-4f10-abb3-9d403cde25c7 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1244.469862] env[68285]: DEBUG nova.compute.manager [req-03fece69-dfe8-4543-9de1-8fe6dff029eb req-4160c83e-5735-4bbb-8118-ee53c3d2bcf9 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Refreshing instance network info cache due to event network-changed-cdb4de32-3a3d-4f10-abb3-9d403cde25c7. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1244.470077] env[68285]: DEBUG oslo_concurrency.lockutils [req-03fece69-dfe8-4543-9de1-8fe6dff029eb req-4160c83e-5735-4bbb-8118-ee53c3d2bcf9 service nova] Acquiring lock "refresh_cache-b2199b56-64bd-4096-b877-e10656b09313" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.470223] env[68285]: DEBUG oslo_concurrency.lockutils [req-03fece69-dfe8-4543-9de1-8fe6dff029eb req-4160c83e-5735-4bbb-8118-ee53c3d2bcf9 service nova] Acquired lock "refresh_cache-b2199b56-64bd-4096-b877-e10656b09313" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.470385] env[68285]: DEBUG nova.network.neutron [req-03fece69-dfe8-4543-9de1-8fe6dff029eb req-4160c83e-5735-4bbb-8118-ee53c3d2bcf9 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Refreshing network info cache for port cdb4de32-3a3d-4f10-abb3-9d403cde25c7 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1244.475350] env[68285]: DEBUG oslo_vmware.api [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892365, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.604946] env[68285]: DEBUG oslo_vmware.api [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Task: {'id': task-2892368, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.327781} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.605240] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1244.605431] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1244.605610] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1244.605781] env[68285]: INFO nova.compute.manager [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Took 1.32 seconds to destroy the instance on the hypervisor. 
[ 1244.606035] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1244.606228] env[68285]: DEBUG nova.compute.manager [-] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1244.606391] env[68285]: DEBUG nova.network.neutron [-] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1244.815814] env[68285]: DEBUG oslo_concurrency.lockutils [req-91ea41c7-a2b8-4078-ae49-c9f3672684af req-214874c7-e95c-40cf-98f8-b0ec92316e87 service nova] Releasing lock "refresh_cache-5c0a91a4-b247-4950-8c7c-c62afdc4860f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1244.922355] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892362, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.849342} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.923177] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] d4818c98-8134-4426-bd35-b2339ed6abd4/d4818c98-8134-4426-bd35-b2339ed6abd4.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1244.923438] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1244.923784] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-228ad236-aa4a-4815-8906-392b5ef0fa3e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.937244] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1244.937244] env[68285]: value = "task-2892369" [ 1244.937244] env[68285]: _type = "Task" [ 1244.937244] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.948869] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e09b31-a61e-5cc2-2f6c-b3d5279539d3, 'name': SearchDatastore_Task, 'duration_secs': 0.084223} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.952640] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1244.952913] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1244.953236] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.953413] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.953645] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1244.954487] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892369, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.954898] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9696f12-43c0-4d6e-a56c-11ecbab04368 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.969214] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1244.969451] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1244.970235] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc9b41c6-5680-4646-a166-efd7e4641e08 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.983553] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1244.983553] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5229fc46-ef89-38ec-0d50-d5278972aa10" [ 1244.983553] env[68285]: _type = "Task" [ 1244.983553] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.983845] env[68285]: DEBUG oslo_vmware.api [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892365, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.999642] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5229fc46-ef89-38ec-0d50-d5278972aa10, 'name': SearchDatastore_Task, 'duration_secs': 0.01336} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.003952] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eef9019-5d07-4fa5-8da6-563d0c982b26 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.010724] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1245.010724] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]523dcac7-b6ba-a653-8159-d2f4b1680c32" [ 1245.010724] env[68285]: _type = "Task" [ 1245.010724] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.027261] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523dcac7-b6ba-a653-8159-d2f4b1680c32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.030404] env[68285]: DEBUG nova.network.neutron [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Updating instance_info_cache with network_info: [{"id": "1d10105d-1754-49c2-9593-7de22107732e", "address": "fa:16:3e:38:96:08", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d10105d-17", "ovs_interfaceid": "1d10105d-1754-49c2-9593-7de22107732e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.102933] env[68285]: DEBUG nova.compute.manager [req-469d0bea-db2a-4e6f-9b59-bc59750246b8 req-01faa171-8083-4291-9329-308eb8ed8599 service nova] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Received event network-vif-deleted-c698959a-27f2-4b51-ab9c-83564bfc6e47 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1245.103644] env[68285]: INFO nova.compute.manager [req-469d0bea-db2a-4e6f-9b59-bc59750246b8 req-01faa171-8083-4291-9329-308eb8ed8599 service nova] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Neutron deleted interface c698959a-27f2-4b51-ab9c-83564bfc6e47; detaching it from the instance and deleting it from the info cache [ 1245.104160] env[68285]: DEBUG nova.network.neutron [req-469d0bea-db2a-4e6f-9b59-bc59750246b8 req-01faa171-8083-4291-9329-308eb8ed8599 service nova] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.136118] env[68285]: DEBUG nova.network.neutron [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Port 10900535-c864-4616-a243-0798b3cdb70a binding to destination host cpu-1 is already ACTIVE {{(pid=68285) migrate_instance_start 
/opt/stack/nova/nova/network/neutron.py:3228}} [ 1245.255764] env[68285]: DEBUG nova.network.neutron [req-03fece69-dfe8-4543-9de1-8fe6dff029eb req-4160c83e-5735-4bbb-8118-ee53c3d2bcf9 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Updated VIF entry in instance network info cache for port cdb4de32-3a3d-4f10-abb3-9d403cde25c7. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1245.256120] env[68285]: DEBUG nova.network.neutron [req-03fece69-dfe8-4543-9de1-8fe6dff029eb req-4160c83e-5735-4bbb-8118-ee53c3d2bcf9 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Updating instance_info_cache with network_info: [{"id": "cdb4de32-3a3d-4f10-abb3-9d403cde25c7", "address": "fa:16:3e:2d:b2:41", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdb4de32-3a", "ovs_interfaceid": "cdb4de32-3a3d-4f10-abb3-9d403cde25c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.341583] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f51614-bc81-4c62-90b7-397a0c839365 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.352406] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bd7dd0-8717-4dc7-b170-f27e6d5bad28 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.389486] env[68285]: DEBUG nova.network.neutron [-] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.391787] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a494e677-7396-4d99-ba6f-4bf3f2062932 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.401026] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1e8d68-9f5b-4747-8167-9c5c5d3d3562 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.423397] env[68285]: DEBUG nova.compute.provider_tree [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed in ProviderTree for provider: 
7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1245.452271] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892369, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090568} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.452598] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1245.453727] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c713ed9c-736d-4626-aa2d-48eb91eff4dc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.488952] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] d4818c98-8134-4426-bd35-b2339ed6abd4/d4818c98-8134-4426-bd35-b2339ed6abd4.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1245.494780] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-539fbcc8-68e8-4a3e-bc32-a2e0ef74a72d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.525997] env[68285]: DEBUG oslo_vmware.api [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892365, 'name': PowerOffVM_Task, 'duration_secs': 1.086515} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.528159] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1245.528159] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1245.528159] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8d60a2d-7805-482e-aa45-6ef890b45162 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.534087] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1245.534087] env[68285]: value = "task-2892370" [ 1245.534087] env[68285]: _type = "Task" [ 1245.534087] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.534540] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "refresh_cache-801f524e-28b5-4452-b880-0fc30d3c5eef" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.535221] env[68285]: DEBUG nova.objects.instance [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lazy-loading 'flavor' on Instance uuid 801f524e-28b5-4452-b880-0fc30d3c5eef {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1245.536858] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]523dcac7-b6ba-a653-8159-d2f4b1680c32, 'name': SearchDatastore_Task, 'duration_secs': 0.01433} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.537402] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.537698] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 5c0a91a4-b247-4950-8c7c-c62afdc4860f/5c0a91a4-b247-4950-8c7c-c62afdc4860f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1245.543053] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4260b863-76a6-45ea-a487-edee8018244a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.550549] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892370, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.553760] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1245.553760] env[68285]: value = "task-2892372" [ 1245.553760] env[68285]: _type = "Task" [ 1245.553760] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.566212] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892372, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.606891] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-334529c6-61f5-4f84-8861-cb1c07708301 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.619380] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6356c3-6d6b-4364-a6f7-0346e3ad0876 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.631734] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1245.631975] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1245.632213] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Deleting the datastore file [datastore2] be47df2a-aee7-4275-9acb-9cf74367f503 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1245.632977] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b11cd7c5-bb86-460e-89da-7da26f9b53a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.644823] env[68285]: DEBUG oslo_vmware.api [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for the task: (returnval){ [ 1245.644823] env[68285]: value = "task-2892373" [ 1245.644823] env[68285]: _type = "Task" [ 1245.644823] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.661630] env[68285]: DEBUG nova.compute.manager [req-469d0bea-db2a-4e6f-9b59-bc59750246b8 req-01faa171-8083-4291-9329-308eb8ed8599 service nova] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Detach interface failed, port_id=c698959a-27f2-4b51-ab9c-83564bfc6e47, reason: Instance 589d1560-9269-4de2-bd79-454ebdaa40d4 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1245.668838] env[68285]: DEBUG oslo_vmware.api [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892373, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.760522] env[68285]: DEBUG oslo_concurrency.lockutils [req-03fece69-dfe8-4543-9de1-8fe6dff029eb req-4160c83e-5735-4bbb-8118-ee53c3d2bcf9 service nova] Releasing lock "refresh_cache-b2199b56-64bd-4096-b877-e10656b09313" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.897102] env[68285]: INFO nova.compute.manager [-] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Took 1.29 seconds to deallocate network for instance. [ 1245.929021] env[68285]: DEBUG nova.scheduler.client.report [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1246.046549] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c65dd1-818a-40f3-bd3b-2168de6a088e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.062030] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892370, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.083866] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1246.084732] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9891267-17f8-4187-af8c-0185d4410b30 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.091304] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506748} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.092943] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 5c0a91a4-b247-4950-8c7c-c62afdc4860f/5c0a91a4-b247-4950-8c7c-c62afdc4860f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1246.093226] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1246.093557] env[68285]: DEBUG oslo_vmware.api [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1246.093557] env[68285]: value = "task-2892376" [ 1246.093557] env[68285]: _type = "Task" [ 1246.093557] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.093754] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8384e8c-3eb4-4e59-bd5d-2fbe9c673681 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.108221] env[68285]: DEBUG oslo_vmware.api [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892376, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.109687] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1246.109687] env[68285]: value = "task-2892377" [ 1246.109687] env[68285]: _type = "Task" [ 1246.109687] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.118734] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892377, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.171730] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "5abddda1-9bf7-4039-81c7-8622f43cc72e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1246.172028] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "5abddda1-9bf7-4039-81c7-8622f43cc72e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.172171] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "5abddda1-9bf7-4039-81c7-8622f43cc72e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1246.180405] env[68285]: DEBUG oslo_vmware.api [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Task: {'id': task-2892373, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.323953} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.181293] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1246.181969] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1246.182217] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1246.182398] env[68285]: INFO nova.compute.manager [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Took 2.31 seconds to destroy the instance on the hypervisor. 
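(Annotation) The teardown just logged for instance be47df2a-aee7-4275-9acb-9cf74367f503 — PowerOffVM_Task, then UnregisterVM, then FileManager.DeleteDatastoreFile_Task, each task followed by the wait_for_task polling that produces the "progress is N%" records — follows oslo.vmware's invoke-then-poll pattern. A minimal Python sketch of that pattern is below; it assumes an already-configured oslo_vmware.api.VMwareAPISession plus VM, datacenter and datastore-path arguments, the function name is ours, and the call details are an approximation rather than Nova's actual vmops/ds_util code.

```python
# Rough sketch of the invoke-then-poll pattern behind the PowerOffVM_Task,
# UnregisterVM and DeleteDatastoreFile_Task records above. Assumes an
# existing oslo_vmware.api.VMwareAPISession (`session`), a VM managed
# object reference (`vm_ref`), a datacenter reference and a datastore
# path string; details are approximate, not copied from Nova.
from oslo_vmware import exceptions as vexc


def destroy_backing(session, vm_ref, datacenter_ref, ds_path):
    # Power off, then wait for the vCenter task to finish -- this polling
    # loop is what emits the "Task: {...} progress is N%" lines.
    task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is synchronous; no task object comes back.
    session.invoke_api(session.vim, "UnregisterVM", vm_ref)

    # Delete the instance directory from the datastore.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, "DeleteDatastoreFile_Task",
                              file_manager, name=ds_path,
                              datacenter=datacenter_ref)
    try:
        session.wait_for_task(task)
    except vexc.FileNotFoundException:
        # Already gone; treat the delete as successful.
        pass
```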
[ 1246.182781] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1246.182961] env[68285]: DEBUG nova.compute.manager [-] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1246.183078] env[68285]: DEBUG nova.network.neutron [-] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1246.407221] env[68285]: DEBUG oslo_concurrency.lockutils [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1246.435035] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1246.435035] env[68285]: DEBUG nova.compute.manager [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1246.437196] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.712s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.438800] env[68285]: INFO nova.compute.claims [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1246.552688] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892370, 'name': ReconfigVM_Task, 'duration_secs': 0.622501} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.552975] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Reconfigured VM instance instance-00000068 to attach disk [datastore1] d4818c98-8134-4426-bd35-b2339ed6abd4/d4818c98-8134-4426-bd35-b2339ed6abd4.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1246.553639] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c1125a64-f83b-4678-bdc4-eb4cb88eee39 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.561599] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1246.561599] env[68285]: value = "task-2892378" [ 1246.561599] env[68285]: _type = "Task" [ 1246.561599] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.572019] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892378, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.605418] env[68285]: DEBUG oslo_vmware.api [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892376, 'name': PowerOffVM_Task, 'duration_secs': 0.28138} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.605582] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1246.611121] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1246.611527] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0cc3ea6d-3b6b-46d9-aae0-d11f6b61dbc7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.636977] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892377, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080758} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.638295] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1246.638651] env[68285]: DEBUG oslo_vmware.api [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1246.638651] env[68285]: value = "task-2892379" [ 1246.638651] env[68285]: _type = "Task" [ 1246.638651] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.639348] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab1b350-bab2-40b2-8aa6-27b26013d9ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.671059] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 5c0a91a4-b247-4950-8c7c-c62afdc4860f/5c0a91a4-b247-4950-8c7c-c62afdc4860f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1246.671282] env[68285]: DEBUG oslo_vmware.api [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892379, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.671588] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25253394-8f18-4376-8e51-356d2fa5a2d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.696768] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1246.696768] env[68285]: value = "task-2892380" [ 1246.696768] env[68285]: _type = "Task" [ 1246.696768] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.707870] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892380, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.923641] env[68285]: DEBUG nova.network.neutron [-] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.939281] env[68285]: DEBUG nova.compute.utils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1246.943976] env[68285]: DEBUG nova.compute.manager [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1246.943976] env[68285]: DEBUG nova.network.neutron [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1246.992790] env[68285]: DEBUG nova.policy [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '184360cab7224b9eaef80dfe89d0208b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '288595d9298e43fa859bc6b68054aa08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1247.075263] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892378, 'name': Rename_Task, 'duration_secs': 0.147829} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.075577] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1247.075832] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79749608-74f3-4cdc-920e-ff4425eee81c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.083655] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1247.083655] env[68285]: value = "task-2892381" [ 1247.083655] env[68285]: _type = "Task" [ 1247.083655] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.092249] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892381, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.134162] env[68285]: DEBUG nova.compute.manager [req-a2e5b8dc-0e43-4574-9c31-14c3c01aac20 req-35e67e8c-5469-4f90-ab45-f23663481a0f service nova] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Received event network-vif-deleted-724df450-925b-47ae-884b-4935b5b95ab2 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1247.152878] env[68285]: DEBUG oslo_vmware.api [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892379, 'name': ReconfigVM_Task, 'duration_secs': 0.249556} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.153179] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1247.153368] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1247.153616] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a0d5572-dff9-442f-85f5-2c4d5f595075 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.163529] env[68285]: DEBUG oslo_vmware.api [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1247.163529] env[68285]: value = "task-2892382" [ 1247.163529] env[68285]: _type = "Task" [ 1247.163529] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.174527] env[68285]: DEBUG oslo_vmware.api [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892382, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.211548] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892380, 'name': ReconfigVM_Task, 'duration_secs': 0.306287} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.211548] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 5c0a91a4-b247-4950-8c7c-c62afdc4860f/5c0a91a4-b247-4950-8c7c-c62afdc4860f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1247.211548] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c115b44b-f448-4d49-a5f1-91bc3e389be1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.222651] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1247.222651] env[68285]: value = "task-2892383" [ 1247.222651] env[68285]: _type = "Task" [ 1247.222651] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.233446] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892383, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.270912] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.274114] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1247.274114] env[68285]: DEBUG nova.network.neutron [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1247.426751] env[68285]: INFO nova.compute.manager [-] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Took 1.24 seconds to deallocate network for instance. [ 1247.446280] env[68285]: DEBUG nova.compute.manager [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1247.479681] env[68285]: DEBUG nova.network.neutron [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Successfully created port: 2c6c4fc0-06b2-415e-b994-692b79103ce0 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1247.598262] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892381, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.676297] env[68285]: DEBUG oslo_vmware.api [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892382, 'name': PowerOnVM_Task, 'duration_secs': 0.436624} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.676881] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1247.677424] env[68285]: DEBUG nova.compute.manager [None req-0dcec9db-04a5-410d-838c-b76a56f30663 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1247.678916] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0382ad2-d151-49d9-b9fa-dc39a274fea8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.728855] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b67ecc-f474-4ca0-9b69-7909e44712ef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.738404] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892383, 'name': Rename_Task, 'duration_secs': 0.195521} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.739898] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1247.740214] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a750219-654a-4899-8541-ef35723f9149 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.742465] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f5c88e-23d8-4a67-8913-cb899f8fd4cc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.778762] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87258a5b-e6a0-4a1b-822b-2483bbfe5052 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.788160] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1247.788160] env[68285]: value = "task-2892384" [ 1247.788160] env[68285]: _type = "Task" [ 1247.788160] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.793010] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0cf0ca-f4e1-4a79-a346-396173feaf80 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.800402] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892384, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.816220] env[68285]: DEBUG nova.compute.provider_tree [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1247.938357] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1248.052308] env[68285]: DEBUG nova.network.neutron [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance_info_cache with network_info: [{"id": "10900535-c864-4616-a243-0798b3cdb70a", "address": "fa:16:3e:49:12:6b", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10900535-c8", "ovs_interfaceid": "10900535-c864-4616-a243-0798b3cdb70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.096224] env[68285]: DEBUG oslo_vmware.api [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892381, 'name': PowerOnVM_Task, 'duration_secs': 0.544745} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.096224] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1248.096354] env[68285]: INFO nova.compute.manager [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Took 9.60 seconds to spawn the instance on the hypervisor. [ 1248.096763] env[68285]: DEBUG nova.compute.manager [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1248.097505] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0428150-968d-4bdd-8a96-aeaf9475aa5e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.297776] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892384, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.320013] env[68285]: DEBUG nova.scheduler.client.report [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1248.360636] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Volume attach. 
Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1248.360984] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581062', 'volume_id': '9bd4b699-6f0e-4337-bda4-c670e0842b41', 'name': 'volume-9bd4b699-6f0e-4337-bda4-c670e0842b41', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '68aee959-4168-43a7-a8d1-e6e126a52da5', 'attached_at': '', 'detached_at': '', 'volume_id': '9bd4b699-6f0e-4337-bda4-c670e0842b41', 'serial': '9bd4b699-6f0e-4337-bda4-c670e0842b41'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1248.362078] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b4a5d2-badb-419e-8bb6-d0d85b3e1f1e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.382874] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6913f5-9d2d-4901-a023-7d762110c99c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.415046] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] volume-9bd4b699-6f0e-4337-bda4-c670e0842b41/volume-9bd4b699-6f0e-4337-bda4-c670e0842b41.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1248.415046] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9bac7587-9437-4ab0-a1a3-a0a0915f7117 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.436605] env[68285]: DEBUG oslo_vmware.api [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1248.436605] env[68285]: value = "task-2892385" [ 1248.436605] env[68285]: _type = "Task" [ 1248.436605] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.447601] env[68285]: DEBUG oslo_vmware.api [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892385, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.461574] env[68285]: DEBUG nova.compute.manager [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1248.489714] env[68285]: DEBUG nova.virt.hardware [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1248.489971] env[68285]: DEBUG nova.virt.hardware [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1248.490147] env[68285]: DEBUG nova.virt.hardware [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1248.490332] env[68285]: DEBUG nova.virt.hardware [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1248.490481] env[68285]: DEBUG nova.virt.hardware [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1248.490638] env[68285]: DEBUG nova.virt.hardware [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1248.490852] env[68285]: DEBUG nova.virt.hardware [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1248.491035] env[68285]: DEBUG nova.virt.hardware [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1248.491209] env[68285]: DEBUG nova.virt.hardware [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Got 1 
possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1248.491384] env[68285]: DEBUG nova.virt.hardware [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1248.491595] env[68285]: DEBUG nova.virt.hardware [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1248.492593] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2577fe4-c487-439f-9866-701f8765f233 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.501766] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae56512-a28b-4889-9a97-5391272043bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.556178] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1248.617010] env[68285]: INFO nova.compute.manager [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Took 27.22 seconds to build instance. [ 1248.796356] env[68285]: DEBUG oslo_vmware.api [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892384, 'name': PowerOnVM_Task, 'duration_secs': 0.88601} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.796989] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1248.796989] env[68285]: INFO nova.compute.manager [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Took 7.77 seconds to spawn the instance on the hypervisor. 
[ 1248.796989] env[68285]: DEBUG nova.compute.manager [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1248.797793] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4d3d1f-6255-4856-98b7-7206665fd26e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.825957] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1248.826524] env[68285]: DEBUG nova.compute.manager [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1248.829081] env[68285]: DEBUG oslo_concurrency.lockutils [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.422s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1248.829321] env[68285]: DEBUG nova.objects.instance [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lazy-loading 'resources' on Instance uuid 589d1560-9269-4de2-bd79-454ebdaa40d4 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1248.952094] env[68285]: DEBUG oslo_vmware.api [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892385, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.069433] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97535325-68d5-446c-aea7-c9c2444ca15a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.082059] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d49aff-88ea-470c-9be5-67efa6d3f14b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.120446] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aaf2a4a7-f184-4cf8-832c-a29571613671 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "d4818c98-8134-4426-bd35-b2339ed6abd4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.730s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.131372] env[68285]: DEBUG nova.network.neutron [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Successfully updated port: 2c6c4fc0-06b2-415e-b994-692b79103ce0 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1249.177010] env[68285]: DEBUG nova.compute.manager [req-2dbdb63d-accb-447f-be51-51878878143e req-f9d64548-6403-443a-a1ba-84b3ad2e558f service nova] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Received event network-vif-plugged-2c6c4fc0-06b2-415e-b994-692b79103ce0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1249.177241] env[68285]: DEBUG oslo_concurrency.lockutils [req-2dbdb63d-accb-447f-be51-51878878143e req-f9d64548-6403-443a-a1ba-84b3ad2e558f service nova] Acquiring lock "a4fc942a-03e7-4415-bd95-f1f0e1344a69-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.177433] env[68285]: DEBUG oslo_concurrency.lockutils [req-2dbdb63d-accb-447f-be51-51878878143e req-f9d64548-6403-443a-a1ba-84b3ad2e558f service nova] Lock "a4fc942a-03e7-4415-bd95-f1f0e1344a69-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.177599] env[68285]: DEBUG oslo_concurrency.lockutils [req-2dbdb63d-accb-447f-be51-51878878143e req-f9d64548-6403-443a-a1ba-84b3ad2e558f service nova] Lock "a4fc942a-03e7-4415-bd95-f1f0e1344a69-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.177804] env[68285]: DEBUG nova.compute.manager [req-2dbdb63d-accb-447f-be51-51878878143e req-f9d64548-6403-443a-a1ba-84b3ad2e558f service nova] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] No waiting events found dispatching network-vif-plugged-2c6c4fc0-06b2-415e-b994-692b79103ce0 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1249.177940] env[68285]: WARNING nova.compute.manager [req-2dbdb63d-accb-447f-be51-51878878143e 
req-f9d64548-6403-443a-a1ba-84b3ad2e558f service nova] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Received unexpected event network-vif-plugged-2c6c4fc0-06b2-415e-b994-692b79103ce0 for instance with vm_state building and task_state spawning. [ 1249.178576] env[68285]: DEBUG nova.compute.manager [req-2dbdb63d-accb-447f-be51-51878878143e req-f9d64548-6403-443a-a1ba-84b3ad2e558f service nova] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Received event network-changed-2c6c4fc0-06b2-415e-b994-692b79103ce0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1249.178576] env[68285]: DEBUG nova.compute.manager [req-2dbdb63d-accb-447f-be51-51878878143e req-f9d64548-6403-443a-a1ba-84b3ad2e558f service nova] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Refreshing instance network info cache due to event network-changed-2c6c4fc0-06b2-415e-b994-692b79103ce0. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1249.178576] env[68285]: DEBUG oslo_concurrency.lockutils [req-2dbdb63d-accb-447f-be51-51878878143e req-f9d64548-6403-443a-a1ba-84b3ad2e558f service nova] Acquiring lock "refresh_cache-a4fc942a-03e7-4415-bd95-f1f0e1344a69" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.178730] env[68285]: DEBUG oslo_concurrency.lockutils [req-2dbdb63d-accb-447f-be51-51878878143e req-f9d64548-6403-443a-a1ba-84b3ad2e558f service nova] Acquired lock "refresh_cache-a4fc942a-03e7-4415-bd95-f1f0e1344a69" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1249.178814] env[68285]: DEBUG nova.network.neutron [req-2dbdb63d-accb-447f-be51-51878878143e req-f9d64548-6403-443a-a1ba-84b3ad2e558f service nova] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Refreshing network info cache for port 2c6c4fc0-06b2-415e-b994-692b79103ce0 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1249.315624] env[68285]: INFO nova.compute.manager [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Took 18.79 seconds to build instance. [ 1249.332943] env[68285]: DEBUG nova.compute.utils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1249.338618] env[68285]: DEBUG nova.compute.manager [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1249.338872] env[68285]: DEBUG nova.network.neutron [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1249.411061] env[68285]: DEBUG nova.policy [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27d9dcb055384598a0457adc7e8e1d99', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8ac9328b3c844571a4913db87fcdf383', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1249.452327] env[68285]: DEBUG oslo_vmware.api [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892385, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.486156] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.487520] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.602400] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7c6e02-454c-460b-813e-865683f9555a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.611428] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6b93ac-601b-4748-a73a-6b064b890fc2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.645201] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "refresh_cache-a4fc942a-03e7-4415-bd95-f1f0e1344a69" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.650288] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9faacaa7-c4ca-4d20-9486-07d75ffede02 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.660972] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75930126-04d5-46a8-a8cd-c577aa9107e2 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.676038] env[68285]: DEBUG nova.compute.provider_tree [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1249.724760] env[68285]: DEBUG nova.network.neutron [req-2dbdb63d-accb-447f-be51-51878878143e req-f9d64548-6403-443a-a1ba-84b3ad2e558f service nova] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1249.753871] env[68285]: DEBUG nova.network.neutron [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Successfully created port: c34d5088-84d9-4ff5-97b4-906e64be2921 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1249.818328] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c686d199-5e39-45ed-ac2a-efb1518e8b67 tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "5c0a91a4-b247-4950-8c7c-c62afdc4860f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.327s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.838776] env[68285]: DEBUG nova.network.neutron [req-2dbdb63d-accb-447f-be51-51878878143e req-f9d64548-6403-443a-a1ba-84b3ad2e558f service nova] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.842516] env[68285]: DEBUG nova.compute.manager [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1249.954618] env[68285]: DEBUG oslo_vmware.api [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892385, 'name': ReconfigVM_Task, 'duration_secs': 1.086423} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.955626] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Reconfigured VM instance instance-00000060 to attach disk [datastore1] volume-9bd4b699-6f0e-4337-bda4-c670e0842b41/volume-9bd4b699-6f0e-4337-bda4-c670e0842b41.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1249.961389] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-300805f1-8093-4a94-80de-5b47c3d00b8c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.983070] env[68285]: DEBUG oslo_vmware.api [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1249.983070] env[68285]: value = "task-2892387" [ 1249.983070] env[68285]: _type = "Task" [ 1249.983070] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.998570] env[68285]: DEBUG oslo_vmware.api [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892387, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.999168] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.999472] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.999802] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.000308] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.000611] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.000980] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.001514] env[68285]: 
DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68285) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1250.001898] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.200964] env[68285]: ERROR nova.scheduler.client.report [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] [req-f01de6bf-1ae5-4477-8860-29b65f3dd02e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f01de6bf-1ae5-4477-8860-29b65f3dd02e"}]} [ 1250.207087] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d322183f-dcc7-4b0e-9f16-780df224a7fe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.232148] env[68285]: DEBUG nova.scheduler.client.report [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1250.234757] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143da084-01f9-4984-ba27-6e3cdf0c438c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.243338] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance '5abddda1-9bf7-4039-81c7-8622f43cc72e' progress to 83 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1250.254502] env[68285]: DEBUG nova.scheduler.client.report [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) 
_refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1250.254727] env[68285]: DEBUG nova.compute.provider_tree [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1250.277755] env[68285]: DEBUG nova.scheduler.client.report [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1250.300530] env[68285]: DEBUG nova.scheduler.client.report [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1250.344365] env[68285]: DEBUG oslo_concurrency.lockutils [req-2dbdb63d-accb-447f-be51-51878878143e req-f9d64548-6403-443a-a1ba-84b3ad2e558f service nova] Releasing lock "refresh_cache-a4fc942a-03e7-4415-bd95-f1f0e1344a69" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.348090] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "refresh_cache-a4fc942a-03e7-4415-bd95-f1f0e1344a69" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.354015] env[68285]: DEBUG nova.network.neutron [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1250.362018] env[68285]: DEBUG nova.compute.manager [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Stashing vm_state: active {{(pid=68285) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1250.492563] env[68285]: DEBUG oslo_vmware.api [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892387, 'name': ReconfigVM_Task, 'duration_secs': 0.231388} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.492872] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581062', 'volume_id': '9bd4b699-6f0e-4337-bda4-c670e0842b41', 'name': 'volume-9bd4b699-6f0e-4337-bda4-c670e0842b41', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '68aee959-4168-43a7-a8d1-e6e126a52da5', 'attached_at': '', 'detached_at': '', 'volume_id': '9bd4b699-6f0e-4337-bda4-c670e0842b41', 'serial': '9bd4b699-6f0e-4337-bda4-c670e0842b41'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1250.509119] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1250.581855] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6f5121-151c-4a4a-b664-fde7594f4ac8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.591618] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83817247-c3a6-42cd-9594-6270255bb1cd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.626644] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ace576f-dd56-4e21-bb02-bb92af3c18f1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.635011] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fa4347-6b8f-48c7-bec5-6790ee11be5f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.651693] env[68285]: DEBUG nova.compute.provider_tree [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1250.753137] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1250.753457] env[68285]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11fa7cb6-66ff-4eed-aa7b-d8bba485e848 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.762600] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1250.762600] env[68285]: value = "task-2892389" [ 1250.762600] env[68285]: _type = "Task" [ 1250.762600] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.771569] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892389, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.851018] env[68285]: DEBUG nova.compute.manager [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1250.884404] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1250.889165] env[68285]: DEBUG nova.virt.hardware [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1250.889421] env[68285]: DEBUG nova.virt.hardware [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1250.889641] env[68285]: DEBUG nova.virt.hardware [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
1250.889827] env[68285]: DEBUG nova.virt.hardware [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1250.890030] env[68285]: DEBUG nova.virt.hardware [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1250.890166] env[68285]: DEBUG nova.virt.hardware [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1250.890354] env[68285]: DEBUG nova.virt.hardware [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1250.890516] env[68285]: DEBUG nova.virt.hardware [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1250.890682] env[68285]: DEBUG nova.virt.hardware [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1250.890846] env[68285]: DEBUG nova.virt.hardware [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1250.891033] env[68285]: DEBUG nova.virt.hardware [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1250.891951] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4266de6-0fb5-4a91-af09-5b22bb91b729 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.895623] env[68285]: DEBUG nova.network.neutron [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1250.909327] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7f4a98-e2cd-4d71-83d8-38a761188618 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.079190] env[68285]: DEBUG nova.network.neutron [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Updating instance_info_cache with network_info: [{"id": "2c6c4fc0-06b2-415e-b994-692b79103ce0", "address": "fa:16:3e:a0:90:ef", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6c4fc0-06", "ovs_interfaceid": "2c6c4fc0-06b2-415e-b994-692b79103ce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.189070] env[68285]: DEBUG nova.scheduler.client.report [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 150 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1251.189369] env[68285]: DEBUG nova.compute.provider_tree [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 150 to 151 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1251.189934] env[68285]: DEBUG nova.compute.provider_tree [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 
4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1251.274074] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892389, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.403243] env[68285]: DEBUG oslo_concurrency.lockutils [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "5c0a91a4-b247-4950-8c7c-c62afdc4860f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.403324] env[68285]: DEBUG oslo_concurrency.lockutils [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "5c0a91a4-b247-4950-8c7c-c62afdc4860f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.404059] env[68285]: DEBUG oslo_concurrency.lockutils [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "5c0a91a4-b247-4950-8c7c-c62afdc4860f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.404059] env[68285]: DEBUG oslo_concurrency.lockutils [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "5c0a91a4-b247-4950-8c7c-c62afdc4860f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.404059] env[68285]: DEBUG oslo_concurrency.lockutils [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "5c0a91a4-b247-4950-8c7c-c62afdc4860f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.406398] env[68285]: INFO nova.compute.manager [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Terminating instance [ 1251.535050] env[68285]: DEBUG nova.compute.manager [req-8a6199fb-a137-4b8d-8066-99cb8665bc39 req-f251e83e-e8ee-4a4a-a7cf-6913933b07c1 service nova] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Received event 
network-vif-plugged-c34d5088-84d9-4ff5-97b4-906e64be2921 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1251.535409] env[68285]: DEBUG oslo_concurrency.lockutils [req-8a6199fb-a137-4b8d-8066-99cb8665bc39 req-f251e83e-e8ee-4a4a-a7cf-6913933b07c1 service nova] Acquiring lock "bd3c9b84-794d-4302-bfb2-1181d5ad9552-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.535974] env[68285]: DEBUG oslo_concurrency.lockutils [req-8a6199fb-a137-4b8d-8066-99cb8665bc39 req-f251e83e-e8ee-4a4a-a7cf-6913933b07c1 service nova] Lock "bd3c9b84-794d-4302-bfb2-1181d5ad9552-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.535974] env[68285]: DEBUG oslo_concurrency.lockutils [req-8a6199fb-a137-4b8d-8066-99cb8665bc39 req-f251e83e-e8ee-4a4a-a7cf-6913933b07c1 service nova] Lock "bd3c9b84-794d-4302-bfb2-1181d5ad9552-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.536229] env[68285]: DEBUG nova.compute.manager [req-8a6199fb-a137-4b8d-8066-99cb8665bc39 req-f251e83e-e8ee-4a4a-a7cf-6913933b07c1 service nova] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] No waiting events found dispatching network-vif-plugged-c34d5088-84d9-4ff5-97b4-906e64be2921 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1251.536503] env[68285]: WARNING nova.compute.manager [req-8a6199fb-a137-4b8d-8066-99cb8665bc39 req-f251e83e-e8ee-4a4a-a7cf-6913933b07c1 service nova] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Received unexpected event network-vif-plugged-c34d5088-84d9-4ff5-97b4-906e64be2921 for instance with vm_state building and task_state spawning. 
[ 1251.547595] env[68285]: DEBUG nova.objects.instance [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lazy-loading 'flavor' on Instance uuid 68aee959-4168-43a7-a8d1-e6e126a52da5 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1251.586240] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "refresh_cache-a4fc942a-03e7-4415-bd95-f1f0e1344a69" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1251.586240] env[68285]: DEBUG nova.compute.manager [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Instance network_info: |[{"id": "2c6c4fc0-06b2-415e-b994-692b79103ce0", "address": "fa:16:3e:a0:90:ef", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c6c4fc0-06", "ovs_interfaceid": "2c6c4fc0-06b2-415e-b994-692b79103ce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1251.586240] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:90:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c6c4fc0-06b2-415e-b994-692b79103ce0', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1251.594666] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1251.594666] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1251.594666] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e10b8e3-9bf4-4080-b3e8-f76dea9e29dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.620996] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1251.620996] env[68285]: value = "task-2892390" [ 1251.620996] env[68285]: _type = "Task" [ 1251.620996] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.629894] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892390, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.643937] env[68285]: DEBUG nova.network.neutron [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Successfully updated port: c34d5088-84d9-4ff5-97b4-906e64be2921 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1251.695014] env[68285]: DEBUG oslo_concurrency.lockutils [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.866s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.697638] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.763s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.698282] env[68285]: DEBUG nova.objects.instance [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lazy-loading 'resources' on Instance uuid be47df2a-aee7-4275-9acb-9cf74367f503 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1251.715858] env[68285]: INFO nova.scheduler.client.report [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Deleted allocations for instance 589d1560-9269-4de2-bd79-454ebdaa40d4 [ 1251.776997] env[68285]: DEBUG oslo_vmware.api [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892389, 'name': PowerOnVM_Task, 'duration_secs': 0.783397} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.777357] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1251.777545] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c508b8-1f3b-4248-ab2f-27ece38692c6 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance '5abddda1-9bf7-4039-81c7-8622f43cc72e' progress to 100 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1251.912314] env[68285]: DEBUG nova.compute.manager [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1251.912592] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1251.913527] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3672a15-7165-44e1-a09c-74c55d605118 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.922124] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1251.922396] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ffb16131-d2a7-4af7-9db3-091a7866e343 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.929865] env[68285]: DEBUG oslo_vmware.api [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1251.929865] env[68285]: value = "task-2892391" [ 1251.929865] env[68285]: _type = "Task" [ 1251.929865] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.938594] env[68285]: DEBUG oslo_vmware.api [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892391, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.042511] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "852ab501-00a6-442b-804a-1bbf49a2be8c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.043170] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.053186] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68a63cee-fede-4a45-ad8f-e3df671ea76a tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "68aee959-4168-43a7-a8d1-e6e126a52da5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.325s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.129582] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892390, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.146443] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Acquiring lock "refresh_cache-bd3c9b84-794d-4302-bfb2-1181d5ad9552" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.146601] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Acquired lock "refresh_cache-bd3c9b84-794d-4302-bfb2-1181d5ad9552" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1252.146867] env[68285]: DEBUG nova.network.neutron [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1252.227127] env[68285]: DEBUG oslo_concurrency.lockutils [None req-255f41cd-f85e-4f78-9b55-bf36e4066751 tempest-ListImageFiltersTestJSON-1002506643 tempest-ListImageFiltersTestJSON-1002506643-project-member] Lock "589d1560-9269-4de2-bd79-454ebdaa40d4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.464s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.443587] env[68285]: DEBUG oslo_vmware.api [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: 
{'id': task-2892391, 'name': PowerOffVM_Task, 'duration_secs': 0.270313} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.446179] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1252.446358] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1252.446787] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9b8d058-95ef-4407-82e6-a26a1fbc2f7b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.458150] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afc8a09-b7b0-42f7-bce5-14149734a09b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.467025] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29dbf1f7-193c-4492-b80a-43b9ae0f5f66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.499848] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4614a398-cc55-4a62-83fb-732ebd430788 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.509916] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b87eace-12f9-4b52-b942-5b99ced75fdd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.528124] env[68285]: DEBUG nova.compute.provider_tree [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1252.531402] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1252.531611] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1252.531792] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb7a92a-56be-4962-977a-e94d59b4cfac 
tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleting the datastore file [datastore1] 5c0a91a4-b247-4950-8c7c-c62afdc4860f {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1252.532051] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-40cef6b4-729a-40c9-80e0-e50eb8543556 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.540281] env[68285]: DEBUG oslo_vmware.api [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for the task: (returnval){ [ 1252.540281] env[68285]: value = "task-2892393" [ 1252.540281] env[68285]: _type = "Task" [ 1252.540281] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.545627] env[68285]: DEBUG nova.compute.manager [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1252.552570] env[68285]: DEBUG oslo_vmware.api [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892393, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.605034] env[68285]: DEBUG oslo_concurrency.lockutils [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "68aee959-4168-43a7-a8d1-e6e126a52da5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.605330] env[68285]: DEBUG oslo_concurrency.lockutils [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "68aee959-4168-43a7-a8d1-e6e126a52da5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.629710] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892390, 'name': CreateVM_Task, 'duration_secs': 0.581525} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.629897] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1252.630572] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.631140] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1252.631140] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1252.631299] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb52bf59-abf4-408d-9cc2-b0d374990c5e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.637267] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1252.637267] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52bf1d83-cea4-5c5b-05dd-cbbb4cf13cb1" [ 1252.637267] env[68285]: _type = "Task" [ 1252.637267] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.645617] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bf1d83-cea4-5c5b-05dd-cbbb4cf13cb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.693032] env[68285]: DEBUG nova.network.neutron [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1252.860757] env[68285]: DEBUG nova.network.neutron [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Updating instance_info_cache with network_info: [{"id": "c34d5088-84d9-4ff5-97b4-906e64be2921", "address": "fa:16:3e:57:c4:3c", "network": {"id": "82742763-5d2d-4805-8e93-26219a26ce63", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2117033789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ac9328b3c844571a4913db87fcdf383", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc34d5088-84", "ovs_interfaceid": "c34d5088-84d9-4ff5-97b4-906e64be2921", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.031053] env[68285]: DEBUG nova.scheduler.client.report [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1253.052080] env[68285]: DEBUG oslo_vmware.api [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Task: {'id': task-2892393, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.444679} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.052417] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1253.052667] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1253.052934] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1253.053191] env[68285]: INFO nova.compute.manager [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1253.053494] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1253.055924] env[68285]: DEBUG nova.compute.manager [-] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1253.056091] env[68285]: DEBUG nova.network.neutron [-] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1253.073617] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1253.108361] env[68285]: INFO nova.compute.manager [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Detaching volume 9bd4b699-6f0e-4337-bda4-c670e0842b41 [ 1253.143227] env[68285]: INFO nova.virt.block_device [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Attempting to driver detach volume 9bd4b699-6f0e-4337-bda4-c670e0842b41 from mountpoint /dev/sdb [ 1253.144027] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Volume detach. Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1253.144027] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581062', 'volume_id': '9bd4b699-6f0e-4337-bda4-c670e0842b41', 'name': 'volume-9bd4b699-6f0e-4337-bda4-c670e0842b41', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '68aee959-4168-43a7-a8d1-e6e126a52da5', 'attached_at': '', 'detached_at': '', 'volume_id': '9bd4b699-6f0e-4337-bda4-c670e0842b41', 'serial': '9bd4b699-6f0e-4337-bda4-c670e0842b41'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1253.144517] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2766f18-e4c2-4f5a-bf19-c17ec69dc002 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.155796] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bf1d83-cea4-5c5b-05dd-cbbb4cf13cb1, 'name': SearchDatastore_Task, 'duration_secs': 0.032181} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.170557] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1253.170690] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1253.170908] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.171136] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1253.171255] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1253.171635] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1bec1fc-a120-4105-a454-6d67af1dc6d4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.174127] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7659483-42ab-4b4f-97cb-17eceeab2eed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.182638] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89fef8ce-24a3-4460-9b99-cb76d0885dbe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.188946] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1253.189133] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1253.208237] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f48843d3-235c-4264-9ad0-06e5a9f6c708 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.211341] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edeacf4f-0f1a-4257-b227-88e927546d6d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.218016] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1253.218016] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529d8687-3af4-16a1-484b-389371813185" [ 1253.218016] env[68285]: _type = "Task" [ 1253.218016] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.231506] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] The volume has not been displaced from its original location: [datastore1] volume-9bd4b699-6f0e-4337-bda4-c670e0842b41/volume-9bd4b699-6f0e-4337-bda4-c670e0842b41.vmdk. No consolidation needed. {{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1253.236993] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Reconfiguring VM instance instance-00000060 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1253.240363] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1820245d-768e-4792-b55d-109e26b7b4cd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.262116] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529d8687-3af4-16a1-484b-389371813185, 'name': SearchDatastore_Task, 'duration_secs': 0.020093} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.264115] env[68285]: DEBUG oslo_vmware.api [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1253.264115] env[68285]: value = "task-2892395" [ 1253.264115] env[68285]: _type = "Task" [ 1253.264115] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.264324] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-660eba8b-348a-4f40-a71e-7c8635262427 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.280957] env[68285]: DEBUG oslo_vmware.api [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892395, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.281235] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1253.281235] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52954564-d3a4-83c3-1123-f55ee2b58424" [ 1253.281235] env[68285]: _type = "Task" [ 1253.281235] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.291780] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52954564-d3a4-83c3-1123-f55ee2b58424, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.366650] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Releasing lock "refresh_cache-bd3c9b84-794d-4302-bfb2-1181d5ad9552" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1253.366967] env[68285]: DEBUG nova.compute.manager [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Instance network_info: |[{"id": "c34d5088-84d9-4ff5-97b4-906e64be2921", "address": "fa:16:3e:57:c4:3c", "network": {"id": "82742763-5d2d-4805-8e93-26219a26ce63", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2117033789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ac9328b3c844571a4913db87fcdf383", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc34d5088-84", "ovs_interfaceid": "c34d5088-84d9-4ff5-97b4-906e64be2921", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1253.367752] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:c4:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '777870ab-362f-4a17-9c1c-8d9cc26cd4ce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c34d5088-84d9-4ff5-97b4-906e64be2921', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1253.375988] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Creating folder: Project (8ac9328b3c844571a4913db87fcdf383). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1253.376702] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90987a58-c420-439a-9d72-70b0a088ef45 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.392580] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Created folder: Project (8ac9328b3c844571a4913db87fcdf383) in parent group-v580775. [ 1253.392853] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Creating folder: Instances. Parent ref: group-v581065. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1253.393225] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d23dfc9-8f37-4c3a-849c-9df0e35f08e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.408753] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Created folder: Instances in parent group-v581065. [ 1253.409056] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1253.409227] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1253.409532] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ccd9b12-cd57-464f-8e2a-565a80b7bf6c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.429748] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1253.429748] env[68285]: value = "task-2892398" [ 1253.429748] env[68285]: _type = "Task" [ 1253.429748] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.438651] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892398, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.535849] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.838s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.538174] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.029s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.539256] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.539256] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68285) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1253.539256] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.655s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.540738] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d800b315-a7d7-43bf-95c3-61e12443e9bd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.550311] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf72779f-babf-49e0-a3e4-ad2e3422e431 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.567499] env[68285]: INFO nova.scheduler.client.report [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Deleted allocations for instance be47df2a-aee7-4275-9acb-9cf74367f503 [ 1253.568789] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706aac7a-3e60-4a3e-9b67-273a9076207c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.573033] env[68285]: DEBUG nova.compute.manager [req-19db4727-65e8-4dd0-8d8a-fcd609b1553a req-17925c81-16f7-46e3-8d1e-c3d0a6efbde4 service nova] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Received event network-changed-c34d5088-84d9-4ff5-97b4-906e64be2921 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1253.573033] env[68285]: DEBUG nova.compute.manager [req-19db4727-65e8-4dd0-8d8a-fcd609b1553a req-17925c81-16f7-46e3-8d1e-c3d0a6efbde4 service nova] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Refreshing instance network info cache due to event network-changed-c34d5088-84d9-4ff5-97b4-906e64be2921. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1253.573033] env[68285]: DEBUG oslo_concurrency.lockutils [req-19db4727-65e8-4dd0-8d8a-fcd609b1553a req-17925c81-16f7-46e3-8d1e-c3d0a6efbde4 service nova] Acquiring lock "refresh_cache-bd3c9b84-794d-4302-bfb2-1181d5ad9552" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.573033] env[68285]: DEBUG oslo_concurrency.lockutils [req-19db4727-65e8-4dd0-8d8a-fcd609b1553a req-17925c81-16f7-46e3-8d1e-c3d0a6efbde4 service nova] Acquired lock "refresh_cache-bd3c9b84-794d-4302-bfb2-1181d5ad9552" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1253.573224] env[68285]: DEBUG nova.network.neutron [req-19db4727-65e8-4dd0-8d8a-fcd609b1553a req-17925c81-16f7-46e3-8d1e-c3d0a6efbde4 service nova] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Refreshing network info cache for port c34d5088-84d9-4ff5-97b4-906e64be2921 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1253.585230] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5053cd3d-0b87-426e-9641-add3f980b3ce {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.618762] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179066MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=68285) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1253.618910] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1253.777618] env[68285]: DEBUG oslo_vmware.api [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 
tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892395, 'name': ReconfigVM_Task, 'duration_secs': 0.297481} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.778076] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Reconfigured VM instance instance-00000060 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1253.783060] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71d37667-14de-4b17-98c5-f3d9d8e66fab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.802401] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52954564-d3a4-83c3-1123-f55ee2b58424, 'name': SearchDatastore_Task, 'duration_secs': 0.018966} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.803905] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1253.804190] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] a4fc942a-03e7-4415-bd95-f1f0e1344a69/a4fc942a-03e7-4415-bd95-f1f0e1344a69.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1253.804513] env[68285]: DEBUG oslo_vmware.api [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1253.804513] env[68285]: value = "task-2892399" [ 1253.804513] env[68285]: _type = "Task" [ 1253.804513] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.804705] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3538e20-f0be-4841-9d01-e80695b1c60d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.806843] env[68285]: DEBUG nova.network.neutron [-] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.817921] env[68285]: DEBUG oslo_vmware.api [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892399, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.820160] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1253.820160] env[68285]: value = "task-2892400" [ 1253.820160] env[68285]: _type = "Task" [ 1253.820160] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.830670] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892400, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.943434] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892398, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.047126] env[68285]: INFO nova.compute.claims [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1254.085344] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ddffeba1-ac84-42e2-8c01-1fa96146ca6b tempest-ServersNegativeTestJSON-639600364 tempest-ServersNegativeTestJSON-639600364-project-member] Lock "be47df2a-aee7-4275-9acb-9cf74367f503" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.732s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.312358] env[68285]: INFO nova.compute.manager [-] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Took 1.26 seconds to deallocate network for instance. [ 1254.321344] env[68285]: DEBUG oslo_vmware.api [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892399, 'name': ReconfigVM_Task, 'duration_secs': 0.178831} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.321344] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581062', 'volume_id': '9bd4b699-6f0e-4337-bda4-c670e0842b41', 'name': 'volume-9bd4b699-6f0e-4337-bda4-c670e0842b41', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '68aee959-4168-43a7-a8d1-e6e126a52da5', 'attached_at': '', 'detached_at': '', 'volume_id': '9bd4b699-6f0e-4337-bda4-c670e0842b41', 'serial': '9bd4b699-6f0e-4337-bda4-c670e0842b41'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1254.335726] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892400, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.442711] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892398, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.551191] env[68285]: DEBUG nova.network.neutron [req-19db4727-65e8-4dd0-8d8a-fcd609b1553a req-17925c81-16f7-46e3-8d1e-c3d0a6efbde4 service nova] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Updated VIF entry in instance network info cache for port c34d5088-84d9-4ff5-97b4-906e64be2921. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1254.551576] env[68285]: DEBUG nova.network.neutron [req-19db4727-65e8-4dd0-8d8a-fcd609b1553a req-17925c81-16f7-46e3-8d1e-c3d0a6efbde4 service nova] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Updating instance_info_cache with network_info: [{"id": "c34d5088-84d9-4ff5-97b4-906e64be2921", "address": "fa:16:3e:57:c4:3c", "network": {"id": "82742763-5d2d-4805-8e93-26219a26ce63", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2117033789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ac9328b3c844571a4913db87fcdf383", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc34d5088-84", "ovs_interfaceid": "c34d5088-84d9-4ff5-97b4-906e64be2921", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.554191] env[68285]: INFO nova.compute.resource_tracker [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating resource usage from migration 0d716d09-ecc5-4817-b49d-530c6d2f7096 [ 1254.824869] env[68285]: DEBUG oslo_concurrency.lockutils [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.836274] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892400, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524949} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.837102] env[68285]: DEBUG nova.network.neutron [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Port 10900535-c864-4616-a243-0798b3cdb70a binding to destination host cpu-1 is already ACTIVE {{(pid=68285) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1254.837349] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.838285] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.838285] env[68285]: DEBUG nova.network.neutron [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1254.838909] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] a4fc942a-03e7-4415-bd95-f1f0e1344a69/a4fc942a-03e7-4415-bd95-f1f0e1344a69.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1254.839145] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1254.839413] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-23169f3a-8a47-4e45-a21b-17dff7ff352f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.853344] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1254.853344] env[68285]: value = "task-2892401" [ 1254.853344] env[68285]: _type = "Task" [ 1254.853344] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.861698] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867792ea-8339-4632-b1e6-cb8523116058 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.868191] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892401, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.875063] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069b242f-f839-4c6b-9601-aaff751a8a9e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.918918] env[68285]: DEBUG nova.objects.instance [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lazy-loading 'flavor' on Instance uuid 68aee959-4168-43a7-a8d1-e6e126a52da5 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1254.921493] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1aa5a4-430d-4c25-be3b-bd550c62c7d9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.934023] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e3bcf4-700d-459e-ab5d-c9a619687106 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.947175] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892398, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.955706] env[68285]: DEBUG nova.compute.provider_tree [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1255.058016] env[68285]: DEBUG oslo_concurrency.lockutils [req-19db4727-65e8-4dd0-8d8a-fcd609b1553a req-17925c81-16f7-46e3-8d1e-c3d0a6efbde4 service nova] Releasing lock "refresh_cache-bd3c9b84-794d-4302-bfb2-1181d5ad9552" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.058319] env[68285]: DEBUG nova.compute.manager [req-19db4727-65e8-4dd0-8d8a-fcd609b1553a req-17925c81-16f7-46e3-8d1e-c3d0a6efbde4 service nova] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Received event network-vif-deleted-4b0deb3b-eaad-4d59-befc-a8a795b1472f {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1255.058504] env[68285]: INFO nova.compute.manager [req-19db4727-65e8-4dd0-8d8a-fcd609b1553a req-17925c81-16f7-46e3-8d1e-c3d0a6efbde4 service nova] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Neutron deleted interface 4b0deb3b-eaad-4d59-befc-a8a795b1472f; detaching it from the instance and deleting it from the info cache [ 1255.058677] env[68285]: DEBUG nova.network.neutron [req-19db4727-65e8-4dd0-8d8a-fcd609b1553a req-17925c81-16f7-46e3-8d1e-c3d0a6efbde4 service nova] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.364490] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892401, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.183768} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.365142] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1255.365596] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123b9f33-9952-4264-9f60-8d7d011fb5f1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.391024] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] a4fc942a-03e7-4415-bd95-f1f0e1344a69/a4fc942a-03e7-4415-bd95-f1f0e1344a69.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1255.391024] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05125df1-7639-423a-9a0f-a7037edf3879 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.417026] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1255.417026] env[68285]: value = "task-2892403" [ 1255.417026] env[68285]: _type = "Task" [ 1255.417026] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.425461] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892403, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.447877] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892398, 'name': CreateVM_Task, 'duration_secs': 1.582104} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.448076] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1255.448893] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.449082] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.449456] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1255.449730] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db9dc192-03f9-4bee-928d-58d2707e558d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.456750] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Waiting for the task: (returnval){ [ 1255.456750] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529c8985-3095-7823-bd42-4255b788ced6" [ 1255.456750] env[68285]: _type = "Task" [ 1255.456750] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.461163] env[68285]: DEBUG nova.scheduler.client.report [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1255.475203] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529c8985-3095-7823-bd42-4255b788ced6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.561482] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df7297f7-7ba4-4a70-9606-aea6c7972cb3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.582258] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9977adeb-08fb-414e-89b9-260da9e04f5a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.626591] env[68285]: DEBUG nova.compute.manager [req-19db4727-65e8-4dd0-8d8a-fcd609b1553a req-17925c81-16f7-46e3-8d1e-c3d0a6efbde4 service nova] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Detach interface failed, port_id=4b0deb3b-eaad-4d59-befc-a8a795b1472f, reason: Instance 5c0a91a4-b247-4950-8c7c-c62afdc4860f could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1255.833612] env[68285]: DEBUG nova.network.neutron [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance_info_cache with network_info: [{"id": "10900535-c864-4616-a243-0798b3cdb70a", "address": "fa:16:3e:49:12:6b", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10900535-c8", "ovs_interfaceid": "10900535-c864-4616-a243-0798b3cdb70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.929829] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892403, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.930775] env[68285]: DEBUG oslo_concurrency.lockutils [None req-af0d2dc7-171a-4de5-9ab1-4511efea8491 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "68aee959-4168-43a7-a8d1-e6e126a52da5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.325s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.972526] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.434s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.972761] env[68285]: INFO nova.compute.manager [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Migrating [ 1255.980662] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529c8985-3095-7823-bd42-4255b788ced6, 'name': SearchDatastore_Task, 'duration_secs': 0.019376} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.984747] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.911s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.988690] env[68285]: INFO nova.compute.claims [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1255.992762] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.992916] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1255.993124] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.993276] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.993467] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1255.999365] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee74f492-6be7-4b94-b738-7c487beacc4d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.011986] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1256.012250] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1256.013160] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b685cc1-99cc-4b40-a33c-bfeb3ddaa245 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.024359] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Waiting for the task: (returnval){ [ 1256.024359] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e1a58f-7b21-dea7-81d6-54f4f01291f1" [ 1256.024359] env[68285]: _type = "Task" [ 1256.024359] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.036430] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e1a58f-7b21-dea7-81d6-54f4f01291f1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.337677] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1256.436223] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892403, 'name': ReconfigVM_Task, 'duration_secs': 0.728572} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.436223] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Reconfigured VM instance instance-0000006a to attach disk [datastore2] a4fc942a-03e7-4415-bd95-f1f0e1344a69/a4fc942a-03e7-4415-bd95-f1f0e1344a69.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1256.436223] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac6e1125-b71d-4e02-a508-26a0c27d4e8f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.444822] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1256.444822] env[68285]: value = "task-2892404" [ 1256.444822] env[68285]: _type = "Task" [ 1256.444822] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.457370] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892404, 'name': Rename_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.502715] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.502901] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1256.504564] env[68285]: DEBUG nova.network.neutron [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1256.536152] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e1a58f-7b21-dea7-81d6-54f4f01291f1, 'name': SearchDatastore_Task, 'duration_secs': 0.011556} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.537040] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64fce79b-8172-41b6-918e-b0323411073d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.543258] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Waiting for the task: (returnval){ [ 1256.543258] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ffd035-dede-7607-dc46-3185926b1e6a" [ 1256.543258] env[68285]: _type = "Task" [ 1256.543258] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.553699] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ffd035-dede-7607-dc46-3185926b1e6a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.841936] env[68285]: DEBUG nova.compute.manager [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68285) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1256.943897] env[68285]: DEBUG oslo_concurrency.lockutils [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "68aee959-4168-43a7-a8d1-e6e126a52da5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1256.944216] env[68285]: DEBUG oslo_concurrency.lockutils [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "68aee959-4168-43a7-a8d1-e6e126a52da5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1256.944441] env[68285]: DEBUG oslo_concurrency.lockutils [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "68aee959-4168-43a7-a8d1-e6e126a52da5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1256.944629] env[68285]: DEBUG oslo_concurrency.lockutils [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "68aee959-4168-43a7-a8d1-e6e126a52da5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1256.944799] env[68285]: DEBUG oslo_concurrency.lockutils [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "68aee959-4168-43a7-a8d1-e6e126a52da5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1256.950298] env[68285]: INFO nova.compute.manager [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Terminating instance [ 1256.961227] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892404, 'name': Rename_Task, 'duration_secs': 0.429301} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.961813] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1256.961813] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08fcabbb-9c1c-425b-ada8-ba1d2fb76dbc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.970737] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1256.970737] env[68285]: value = "task-2892406" [ 1256.970737] env[68285]: _type = "Task" [ 1256.970737] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.979804] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892406, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.061436] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ffd035-dede-7607-dc46-3185926b1e6a, 'name': SearchDatastore_Task, 'duration_secs': 0.013587} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.061595] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1257.061805] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] bd3c9b84-794d-4302-bfb2-1181d5ad9552/bd3c9b84-794d-4302-bfb2-1181d5ad9552.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1257.062093] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1a32fe7-384c-4acf-9100-118e10e0e348 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.077022] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Waiting for the task: (returnval){ [ 1257.077022] env[68285]: value = "task-2892407" [ 1257.077022] env[68285]: _type = "Task" [ 1257.077022] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.083925] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892407, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.288386] env[68285]: DEBUG nova.network.neutron [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating instance_info_cache with network_info: [{"id": "d00eb1aa-97af-4a18-9582-416989e71604", "address": "fa:16:3e:0f:89:60", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd00eb1aa-97", "ovs_interfaceid": "d00eb1aa-97af-4a18-9582-416989e71604", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.305501] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e52b19-abf5-4be2-a271-aabc9de51d39 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.315740] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac506c6-9e9d-4784-bdbc-4094e026d532 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.354674] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d355cb44-9c2d-4293-a3d6-400564a3dae9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.364770] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f78a00-6530-4405-af24-5d5a159607b4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.383301] env[68285]: DEBUG nova.compute.provider_tree [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1257.457870] env[68285]: DEBUG 
nova.compute.manager [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1257.458197] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1257.459049] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f25078-5671-456c-84ae-c641c0539c03 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.472510] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1257.476093] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33ee88ba-908d-4804-af03-fb7ac8d7a8ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.485954] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892406, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.488295] env[68285]: DEBUG oslo_vmware.api [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1257.488295] env[68285]: value = "task-2892408" [ 1257.488295] env[68285]: _type = "Task" [ 1257.488295] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.498235] env[68285]: DEBUG oslo_vmware.api [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892408, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.587318] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892407, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.796189] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1257.905300] env[68285]: ERROR nova.scheduler.client.report [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [req-e6903cd7-3da2-461e-acb6-36fd3180ba21] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e6903cd7-3da2-461e-acb6-36fd3180ba21"}]} [ 1257.925990] env[68285]: DEBUG nova.scheduler.client.report [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1257.938792] env[68285]: DEBUG nova.scheduler.client.report [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1257.939736] env[68285]: DEBUG nova.compute.provider_tree [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1257.952545] env[68285]: DEBUG nova.scheduler.client.report [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 
tempest-ServerRescueNegativeTestJSON-696928139-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1257.969594] env[68285]: DEBUG nova.scheduler.client.report [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1257.982612] env[68285]: DEBUG oslo_vmware.api [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892406, 'name': PowerOnVM_Task, 'duration_secs': 0.755948} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.982885] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1257.983653] env[68285]: INFO nova.compute.manager [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Took 9.52 seconds to spawn the instance on the hypervisor. [ 1257.983653] env[68285]: DEBUG nova.compute.manager [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1257.984520] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0442ea08-6d5c-417b-9382-a7ea9e4a0ab9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.991884] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1258.002084] env[68285]: DEBUG oslo_vmware.api [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892408, 'name': PowerOffVM_Task, 'duration_secs': 0.350505} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.003709] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1258.003968] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1258.009085] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be500cb1-e26e-4823-9ec8-fe8a901d8a19 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.090253] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892407, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.589873} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.090981] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1258.091056] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1258.091241] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Deleting the datastore file [datastore1] 68aee959-4168-43a7-a8d1-e6e126a52da5 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1258.091520] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] bd3c9b84-794d-4302-bfb2-1181d5ad9552/bd3c9b84-794d-4302-bfb2-1181d5ad9552.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1258.091842] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1258.092105] env[68285]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-83772625-2792-4b53-80a1-41e6a824179d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.096689] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c311df6-1228-4c29-bd47-256664325b27 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.106280] env[68285]: DEBUG oslo_vmware.api [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1258.106280] env[68285]: value = "task-2892411" [ 1258.106280] env[68285]: _type = "Task" [ 1258.106280] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.107398] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Waiting for the task: (returnval){ [ 1258.107398] env[68285]: value = "task-2892410" [ 1258.107398] env[68285]: _type = "Task" [ 1258.107398] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.123989] env[68285]: DEBUG oslo_vmware.api [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892411, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.127779] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892410, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.272291] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab6d526-85bc-49ac-81ea-e6c4f5322e33 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.280141] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Acquiring lock "51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1258.280387] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Lock "51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1258.284954] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9188a6c1-8d56-4eb7-a751-cdd334ae2904 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.325162] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a87deb76-c6f4-485c-a4e3-2c4842d8e4dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.334870] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3b5ae6-ea9b-491c-8519-c41f73fa9660 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.354628] env[68285]: DEBUG nova.compute.provider_tree [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1258.518251] env[68285]: INFO nova.compute.manager [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Took 21.79 seconds to build instance. [ 1258.619507] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892410, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07844} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.622737] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1258.623055] env[68285]: DEBUG oslo_vmware.api [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892411, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161729} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.623771] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74eefd4-c14a-449a-a4b9-9a3871461052 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.626398] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1258.626660] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1258.626909] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1258.627114] env[68285]: INFO nova.compute.manager [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1258.627351] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1258.627585] env[68285]: DEBUG nova.compute.manager [-] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1258.627691] env[68285]: DEBUG nova.network.neutron [-] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1258.651562] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] bd3c9b84-794d-4302-bfb2-1181d5ad9552/bd3c9b84-794d-4302-bfb2-1181d5ad9552.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1258.651941] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83c40d53-6134-4884-92a7-f0182446d3ac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.674294] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Waiting for the task: (returnval){ [ 1258.674294] env[68285]: value = "task-2892412" [ 1258.674294] env[68285]: _type = "Task" [ 1258.674294] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.685371] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892412, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.784587] env[68285]: DEBUG nova.compute.manager [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1258.857388] env[68285]: DEBUG nova.scheduler.client.report [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1259.020826] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8e89e5b-eaed-4ecf-9a40-70535dd6b848 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "a4fc942a-03e7-4415-bd95-f1f0e1344a69" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.304s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.110177] env[68285]: DEBUG nova.compute.manager [req-8e985efb-b7f0-4b5d-b522-786d4bed5ed8 req-7709c530-f482-4e11-8eab-0b98362931c9 service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Received event network-vif-deleted-47dedd89-6346-46ef-93a1-287c2727d7cc {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1259.110509] env[68285]: INFO nova.compute.manager [req-8e985efb-b7f0-4b5d-b522-786d4bed5ed8 req-7709c530-f482-4e11-8eab-0b98362931c9 service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Neutron deleted interface 47dedd89-6346-46ef-93a1-287c2727d7cc; detaching it from the instance and deleting it from the info cache [ 1259.110555] env[68285]: DEBUG nova.network.neutron [req-8e985efb-b7f0-4b5d-b522-786d4bed5ed8 req-7709c530-f482-4e11-8eab-0b98362931c9 service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.189043] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892412, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.309144] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1259.338229] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e29ac3-1e0d-4ce2-9c6a-12d33df35e5f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.363456] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.379s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.364019] env[68285]: DEBUG nova.compute.manager [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1259.367702] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating instance 'd4818c98-8134-4426-bd35-b2339ed6abd4' progress to 0 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1259.371718] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 5.753s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.589712] env[68285]: DEBUG nova.network.neutron [-] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.613930] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff7898c0-4a5a-4a32-82f2-66c06e78608b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.624098] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be9c9fb-e57d-40c3-825d-23016263fc98 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.661172] env[68285]: DEBUG nova.compute.manager [req-8e985efb-b7f0-4b5d-b522-786d4bed5ed8 req-7709c530-f482-4e11-8eab-0b98362931c9 service nova] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Detach interface failed, port_id=47dedd89-6346-46ef-93a1-287c2727d7cc, reason: Instance 68aee959-4168-43a7-a8d1-e6e126a52da5 could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1259.685350] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892412, 'name': ReconfigVM_Task, 'duration_secs': 0.55563} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.685626] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Reconfigured VM instance instance-0000006b to attach disk [datastore2] bd3c9b84-794d-4302-bfb2-1181d5ad9552/bd3c9b84-794d-4302-bfb2-1181d5ad9552.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1259.686285] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b64775a-7207-46f4-b2ea-8cc5a6d9fef7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.693373] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Waiting for the task: (returnval){ [ 1259.693373] env[68285]: value = "task-2892414" [ 1259.693373] env[68285]: _type = "Task" [ 1259.693373] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.701875] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892414, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.869371] env[68285]: DEBUG nova.compute.utils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1259.870922] env[68285]: DEBUG nova.compute.manager [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1259.870922] env[68285]: DEBUG nova.network.neutron [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1259.874219] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1259.874481] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a47c2b1c-f051-4253-ba34-ca453ef4fcf3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.891452] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1259.891452] env[68285]: value = "task-2892415" [ 1259.891452] env[68285]: _type = "Task" [ 1259.891452] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.903664] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892415, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.915306] env[68285]: DEBUG nova.policy [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9aef349348af4f138b71a8b257300b03', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0142f80018fe4d41830f10307dd482f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1260.071548] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "a4fc942a-03e7-4415-bd95-f1f0e1344a69" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.071957] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "a4fc942a-03e7-4415-bd95-f1f0e1344a69" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.072271] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "a4fc942a-03e7-4415-bd95-f1f0e1344a69-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.076022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "a4fc942a-03e7-4415-bd95-f1f0e1344a69-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.076022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "a4fc942a-03e7-4415-bd95-f1f0e1344a69-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.083810] env[68285]: INFO nova.compute.manager [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Terminating instance [ 1260.093130] env[68285]: INFO nova.compute.manager [-] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Took 1.47 seconds to deallocate network for instance. 
[ 1260.204705] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892414, 'name': Rename_Task, 'duration_secs': 0.320153} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.205047] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1260.205345] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a037f782-0bb6-48a1-9e42-c44a23fbe004 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.213172] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Waiting for the task: (returnval){ [ 1260.213172] env[68285]: value = "task-2892416" [ 1260.213172] env[68285]: _type = "Task" [ 1260.213172] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.222933] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892416, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.374356] env[68285]: DEBUG nova.compute.manager [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1260.386610] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Applying migration context for instance 5abddda1-9bf7-4039-81c7-8622f43cc72e as it has an incoming, in-progress migration 0f8618b8-03e2-48c3-886c-a1c9bc490e78. Migration status is reverting {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1260.387042] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Applying migration context for instance d4818c98-8134-4426-bd35-b2339ed6abd4 as it has an incoming, in-progress migration 0d716d09-ecc5-4817-b49d-530c6d2f7096. 
Migration status is migrating {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1260.388895] env[68285]: INFO nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating resource usage from migration 0f8618b8-03e2-48c3-886c-a1c9bc490e78 [ 1260.389255] env[68285]: INFO nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating resource usage from migration 0d716d09-ecc5-4817-b49d-530c6d2f7096 [ 1260.402190] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892415, 'name': PowerOffVM_Task, 'duration_secs': 0.27109} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.406278] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1260.406278] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating instance 'd4818c98-8134-4426-bd35-b2339ed6abd4' progress to 17 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1260.417285] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d0f6ab86-e18d-42ac-bcf3-94eafb1939ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1260.417423] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 3858399e-9fc4-4d60-a9d5-95caefb7bd87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1260.417546] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d1446290-95ce-4e87-85df-7cc69bb57ce7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1260.417664] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 68aee959-4168-43a7-a8d1-e6e126a52da5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1260.417795] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 801f524e-28b5-4452-b880-0fc30d3c5eef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1260.417921] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance feda1a98-3086-43a6-a887-f4d1602ca8ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1260.418057] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 8a649b1e-d007-4032-a46c-b479365e5289 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1260.418188] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 8917672f-3b0d-42a1-b8b1-94ac47ce941a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1260.418304] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance b2199b56-64bd-4096-b877-e10656b09313 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1260.418482] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Migration 0f8618b8-03e2-48c3-886c-a1c9bc490e78 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1260.418642] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 5abddda1-9bf7-4039-81c7-8622f43cc72e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1260.418792] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 5c0a91a4-b247-4950-8c7c-c62afdc4860f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1260.418911] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance a4fc942a-03e7-4415-bd95-f1f0e1344a69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1260.419034] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance bd3c9b84-794d-4302-bfb2-1181d5ad9552 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1260.419161] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Migration 0d716d09-ecc5-4817-b49d-530c6d2f7096 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1260.419296] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d4818c98-8134-4426-bd35-b2339ed6abd4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1260.419429] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 852ab501-00a6-442b-804a-1bbf49a2be8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1260.449658] env[68285]: DEBUG nova.network.neutron [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Successfully created port: f4b82f26-eff6-4869-af1c-0bc1a3a4d606 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1260.589020] env[68285]: DEBUG nova.compute.manager [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1260.589291] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1260.590241] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e71a6a3-672b-478c-86b5-0486b613ae80 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.598888] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1260.599152] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77f4f5bd-f867-465d-8d46-9a35d9f50868 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.601960] env[68285]: DEBUG oslo_concurrency.lockutils [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.608857] env[68285]: DEBUG oslo_vmware.api [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1260.608857] env[68285]: value = "task-2892417" [ 1260.608857] env[68285]: _type = "Task" [ 1260.608857] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.620222] env[68285]: DEBUG oslo_vmware.api [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892417, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.723577] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892416, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.909238] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1260.909490] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1260.909653] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1260.909837] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1260.909985] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1260.910150] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1260.910367] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1260.910630] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1260.910832] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Got 1 possible 
topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1260.911096] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1260.911301] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1260.917799] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adb15401-bdd5-43ca-a51b-1541c3a9938a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.928817] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1260.928961] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1260.929124] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3712MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1260.938980] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1260.938980] env[68285]: value = "task-2892418" [ 1260.938980] env[68285]: _type = "Task" [ 1260.938980] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.949929] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892418, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.121332] env[68285]: DEBUG oslo_vmware.api [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892417, 'name': PowerOffVM_Task, 'duration_secs': 0.263024} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.121738] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1261.121885] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1261.122176] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d8cf370-ffcf-498f-b5b7-f9d6fcda08c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.193670] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2b7d01-b636-46a7-bc80-a490bd48320f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.201352] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b72472-2cdc-4b6b-80b8-38cb08d88630 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.238595] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c9e4d4-3dd0-47b6-bc42-7af42286d94b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.240905] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1261.241107] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1261.241284] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleting the datastore file [datastore2] a4fc942a-03e7-4415-bd95-f1f0e1344a69 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1261.241735] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50ad60b5-ea1a-4176-bf8f-abbd87f2fe15 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.252674] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f790db8-27dd-40a8-bd46-655117185459 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.257521] env[68285]: DEBUG oslo_vmware.api [None 
req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1261.257521] env[68285]: value = "task-2892421" [ 1261.257521] env[68285]: _type = "Task" [ 1261.257521] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.257757] env[68285]: DEBUG oslo_vmware.api [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892416, 'name': PowerOnVM_Task, 'duration_secs': 0.679732} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.258107] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1261.258325] env[68285]: INFO nova.compute.manager [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Took 10.41 seconds to spawn the instance on the hypervisor. [ 1261.258783] env[68285]: DEBUG nova.compute.manager [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1261.263274] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09639c9b-10ef-4578-9be4-7cc3b2d5d8ce {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.275013] env[68285]: DEBUG nova.compute.provider_tree [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1261.288038] env[68285]: DEBUG oslo_vmware.api [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892421, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.384176] env[68285]: DEBUG nova.compute.manager [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1261.411446] env[68285]: DEBUG nova.virt.hardware [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1261.412018] env[68285]: DEBUG nova.virt.hardware [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1261.412018] env[68285]: DEBUG nova.virt.hardware [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1261.412130] env[68285]: DEBUG nova.virt.hardware [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1261.412281] env[68285]: DEBUG nova.virt.hardware [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1261.412433] env[68285]: DEBUG nova.virt.hardware [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1261.412641] env[68285]: DEBUG nova.virt.hardware [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1261.412833] env[68285]: DEBUG nova.virt.hardware [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1261.413128] env[68285]: DEBUG nova.virt.hardware [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1261.413320] env[68285]: DEBUG nova.virt.hardware [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1261.413497] env[68285]: DEBUG nova.virt.hardware [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1261.414369] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0b9ee0-12db-4ce4-9b6a-d870d58e7df1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.422854] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3f18a5-ea74-4776-888f-60aee49b14b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.447328] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892418, 'name': ReconfigVM_Task, 'duration_secs': 0.289707} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.447573] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating instance 'd4818c98-8134-4426-bd35-b2339ed6abd4' progress to 33 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1261.767919] env[68285]: DEBUG oslo_vmware.api [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.387513} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.768188] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1261.768434] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1261.768597] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1261.768810] env[68285]: INFO nova.compute.manager [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1261.769087] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1261.769391] env[68285]: DEBUG nova.compute.manager [-] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1261.769391] env[68285]: DEBUG nova.network.neutron [-] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1261.777982] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1261.794422] env[68285]: INFO nova.compute.manager [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Took 23.09 seconds to build instance. 
[ 1261.954381] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1261.954653] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1261.954852] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1261.955172] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1261.955265] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1261.955369] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1261.955555] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1261.955986] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1261.955986] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 
1261.956073] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1261.956198] env[68285]: DEBUG nova.virt.hardware [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1261.961720] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Reconfiguring VM instance instance-00000068 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1261.962035] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f181d1aa-2cd8-4896-9755-5c9c112d5e66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.983271] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1261.983271] env[68285]: value = "task-2892422" [ 1261.983271] env[68285]: _type = "Task" [ 1261.983271] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.992085] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892422, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.011133] env[68285]: DEBUG nova.compute.manager [req-3003f8b0-8b45-4ed8-8bdf-5dc068cd3fc8 req-5fc6f3dd-fe28-4587-ad91-1c964e3c28e8 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Received event network-vif-plugged-f4b82f26-eff6-4869-af1c-0bc1a3a4d606 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1262.011133] env[68285]: DEBUG oslo_concurrency.lockutils [req-3003f8b0-8b45-4ed8-8bdf-5dc068cd3fc8 req-5fc6f3dd-fe28-4587-ad91-1c964e3c28e8 service nova] Acquiring lock "852ab501-00a6-442b-804a-1bbf49a2be8c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.011133] env[68285]: DEBUG oslo_concurrency.lockutils [req-3003f8b0-8b45-4ed8-8bdf-5dc068cd3fc8 req-5fc6f3dd-fe28-4587-ad91-1c964e3c28e8 service nova] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.011133] env[68285]: DEBUG oslo_concurrency.lockutils [req-3003f8b0-8b45-4ed8-8bdf-5dc068cd3fc8 req-5fc6f3dd-fe28-4587-ad91-1c964e3c28e8 service nova] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.011133] env[68285]: DEBUG nova.compute.manager [req-3003f8b0-8b45-4ed8-8bdf-5dc068cd3fc8 req-5fc6f3dd-fe28-4587-ad91-1c964e3c28e8 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] No waiting events found dispatching network-vif-plugged-f4b82f26-eff6-4869-af1c-0bc1a3a4d606 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1262.011133] env[68285]: WARNING nova.compute.manager [req-3003f8b0-8b45-4ed8-8bdf-5dc068cd3fc8 req-5fc6f3dd-fe28-4587-ad91-1c964e3c28e8 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Received unexpected event network-vif-plugged-f4b82f26-eff6-4869-af1c-0bc1a3a4d606 for instance with vm_state building and task_state spawning. 
[ 1262.100352] env[68285]: DEBUG nova.network.neutron [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Successfully updated port: f4b82f26-eff6-4869-af1c-0bc1a3a4d606 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1262.282882] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1262.283183] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.912s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.283379] env[68285]: DEBUG oslo_concurrency.lockutils [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.459s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.283555] env[68285]: DEBUG oslo_concurrency.lockutils [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.285742] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 4.294s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.296273] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99801132-3df4-40a1-8eb5-813b13140bea tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Lock "bd3c9b84-794d-4302-bfb2-1181d5ad9552" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.614s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.309665] env[68285]: INFO nova.scheduler.client.report [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Deleted allocations for instance 5c0a91a4-b247-4950-8c7c-c62afdc4860f [ 1262.496282] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892422, 'name': ReconfigVM_Task, 'duration_secs': 0.198146} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.496566] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Reconfigured VM instance instance-00000068 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1262.497372] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b255a5-cfb0-450d-bf9c-e8c6551d81d6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.519924] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] d4818c98-8134-4426-bd35-b2339ed6abd4/d4818c98-8134-4426-bd35-b2339ed6abd4.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1262.520207] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9519533a-89b6-4662-9474-a9f99dec9f42 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.538276] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1262.538276] env[68285]: value = "task-2892423" [ 1262.538276] env[68285]: _type = "Task" [ 1262.538276] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.547138] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892423, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.603125] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1262.603278] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1262.603421] env[68285]: DEBUG nova.network.neutron [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1262.652782] env[68285]: DEBUG nova.network.neutron [-] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.738423] env[68285]: DEBUG oslo_concurrency.lockutils [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Acquiring lock "bd3c9b84-794d-4302-bfb2-1181d5ad9552" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.738592] env[68285]: DEBUG oslo_concurrency.lockutils [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Lock "bd3c9b84-794d-4302-bfb2-1181d5ad9552" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.738851] env[68285]: DEBUG oslo_concurrency.lockutils [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Acquiring lock "bd3c9b84-794d-4302-bfb2-1181d5ad9552-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.739091] env[68285]: DEBUG oslo_concurrency.lockutils [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Lock "bd3c9b84-794d-4302-bfb2-1181d5ad9552-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.739231] env[68285]: DEBUG oslo_concurrency.lockutils [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 
tempest-ImagesNegativeTestJSON-1804712379-project-member] Lock "bd3c9b84-794d-4302-bfb2-1181d5ad9552-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.741303] env[68285]: INFO nova.compute.manager [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Terminating instance [ 1262.788818] env[68285]: DEBUG nova.objects.instance [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'migration_context' on Instance uuid 5abddda1-9bf7-4039-81c7-8622f43cc72e {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1262.816263] env[68285]: DEBUG oslo_concurrency.lockutils [None req-abb7a92a-56be-4962-977a-e94d59b4cfac tempest-ServerDiskConfigTestJSON-1313847802 tempest-ServerDiskConfigTestJSON-1313847802-project-member] Lock "5c0a91a4-b247-4950-8c7c-c62afdc4860f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.413s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.049136] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892423, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.138734] env[68285]: DEBUG nova.network.neutron [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1263.155664] env[68285]: INFO nova.compute.manager [-] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Took 1.39 seconds to deallocate network for instance. [ 1263.245056] env[68285]: DEBUG nova.compute.manager [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1263.245294] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1263.246248] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25924d94-2d97-4ecf-9b0e-7d45d845e26e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.256896] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1263.257168] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c522e24b-3e4e-4839-b143-a040a686b481 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.264531] env[68285]: DEBUG oslo_vmware.api [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Waiting for the task: (returnval){ [ 1263.264531] env[68285]: value = "task-2892424" [ 1263.264531] env[68285]: _type = "Task" [ 1263.264531] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.276075] env[68285]: DEBUG oslo_vmware.api [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892424, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.285393] env[68285]: DEBUG nova.network.neutron [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Updating instance_info_cache with network_info: [{"id": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "address": "fa:16:3e:36:84:f9", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4b82f26-ef", "ovs_interfaceid": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.544994] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938533ea-29f3-42ff-a56a-1c054868b350 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.555661] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892423, 'name': ReconfigVM_Task, 'duration_secs': 0.853111} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.557375] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6c57ac-19b1-4b35-b583-469f4c66878c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.560379] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Reconfigured VM instance instance-00000068 to attach disk [datastore1] d4818c98-8134-4426-bd35-b2339ed6abd4/d4818c98-8134-4426-bd35-b2339ed6abd4.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1263.560625] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating instance 'd4818c98-8134-4426-bd35-b2339ed6abd4' progress to 50 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1263.610986] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b664f8d2-85d1-47b2-928d-9e39b0635162 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.622480] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b9dfee-0f06-438e-a21c-c43a768ac23b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.637366] env[68285]: DEBUG nova.compute.provider_tree [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1263.665524] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.775567] env[68285]: DEBUG oslo_vmware.api [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892424, 'name': PowerOffVM_Task, 'duration_secs': 0.229468} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.775844] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1263.776027] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1263.776680] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e557896e-28d5-4cae-82d0-b4a805420c7a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.787895] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1263.788217] env[68285]: DEBUG nova.compute.manager [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Instance network_info: |[{"id": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "address": "fa:16:3e:36:84:f9", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4b82f26-ef", "ovs_interfaceid": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1263.788602] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:84:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a15de394-0367-4921-a5c1-6ac8615e3283', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'f4b82f26-eff6-4869-af1c-0bc1a3a4d606', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1263.796162] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1263.796365] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1263.796584] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d050551f-ed6f-49c1-bbf0-995c04a1c6f3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.817810] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1263.817810] env[68285]: value = "task-2892426" [ 1263.817810] env[68285]: _type = "Task" [ 1263.817810] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.826612] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892426, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.875148] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1263.875353] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1263.875515] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Deleting the datastore file [datastore2] bd3c9b84-794d-4302-bfb2-1181d5ad9552 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1263.875793] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0cc9c02-a017-4a12-8aae-01ac1e119017 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.884094] env[68285]: DEBUG oslo_vmware.api [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Waiting for the task: (returnval){ [ 1263.884094] env[68285]: value = "task-2892427" [ 1263.884094] env[68285]: _type = "Task" [ 1263.884094] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.895532] env[68285]: DEBUG oslo_vmware.api [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892427, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.053075] env[68285]: DEBUG nova.compute.manager [req-7767e3aa-7a23-44c2-9e71-ae59d5423b69 req-0a2c6f97-b581-4190-84fa-12a537477ac9 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Received event network-changed-f4b82f26-eff6-4869-af1c-0bc1a3a4d606 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1264.053195] env[68285]: DEBUG nova.compute.manager [req-7767e3aa-7a23-44c2-9e71-ae59d5423b69 req-0a2c6f97-b581-4190-84fa-12a537477ac9 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Refreshing instance network info cache due to event network-changed-f4b82f26-eff6-4869-af1c-0bc1a3a4d606. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1264.053898] env[68285]: DEBUG oslo_concurrency.lockutils [req-7767e3aa-7a23-44c2-9e71-ae59d5423b69 req-0a2c6f97-b581-4190-84fa-12a537477ac9 service nova] Acquiring lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.053898] env[68285]: DEBUG oslo_concurrency.lockutils [req-7767e3aa-7a23-44c2-9e71-ae59d5423b69 req-0a2c6f97-b581-4190-84fa-12a537477ac9 service nova] Acquired lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1264.053898] env[68285]: DEBUG nova.network.neutron [req-7767e3aa-7a23-44c2-9e71-ae59d5423b69 req-0a2c6f97-b581-4190-84fa-12a537477ac9 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Refreshing network info cache for port f4b82f26-eff6-4869-af1c-0bc1a3a4d606 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1264.066961] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b906670-c011-457b-8134-823260db06b2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.089837] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b64473-bc20-402f-abf4-4b904b499c85 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.110790] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating instance 'd4818c98-8134-4426-bd35-b2339ed6abd4' progress to 67 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1264.142021] env[68285]: DEBUG nova.scheduler.client.report [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1264.328255] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892426, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.394867] env[68285]: DEBUG oslo_vmware.api [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Task: {'id': task-2892427, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201289} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.395040] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1264.395187] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1264.395363] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1264.395538] env[68285]: INFO nova.compute.manager [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1264.395775] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1264.395971] env[68285]: DEBUG nova.compute.manager [-] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1264.396075] env[68285]: DEBUG nova.network.neutron [-] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1264.685992] env[68285]: DEBUG nova.network.neutron [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Port d00eb1aa-97af-4a18-9582-416989e71604 binding to destination host cpu-1 is already ACTIVE {{(pid=68285) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1264.833464] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892426, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.907996] env[68285]: DEBUG nova.network.neutron [req-7767e3aa-7a23-44c2-9e71-ae59d5423b69 req-0a2c6f97-b581-4190-84fa-12a537477ac9 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Updated VIF entry in instance network info cache for port f4b82f26-eff6-4869-af1c-0bc1a3a4d606. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1264.908937] env[68285]: DEBUG nova.network.neutron [req-7767e3aa-7a23-44c2-9e71-ae59d5423b69 req-0a2c6f97-b581-4190-84fa-12a537477ac9 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Updating instance_info_cache with network_info: [{"id": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "address": "fa:16:3e:36:84:f9", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4b82f26-ef", "ovs_interfaceid": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1265.161159] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.873s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.167118] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.856s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.167118] env[68285]: INFO nova.compute.claims [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1265.197749] env[68285]: DEBUG nova.network.neutron [-] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1265.331668] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892426, 'name': CreateVM_Task, 'duration_secs': 1.075061} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.332050] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1265.332670] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.332802] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1265.333176] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1265.333447] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19f0509d-fb87-4cee-806b-0056bf01d18c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.339701] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1265.339701] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527a07c2-c2e9-b615-9a1e-8965ed7414fd" [ 1265.339701] env[68285]: _type = "Task" [ 1265.339701] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.349062] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527a07c2-c2e9-b615-9a1e-8965ed7414fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.412594] env[68285]: DEBUG oslo_concurrency.lockutils [req-7767e3aa-7a23-44c2-9e71-ae59d5423b69 req-0a2c6f97-b581-4190-84fa-12a537477ac9 service nova] Releasing lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1265.413376] env[68285]: DEBUG nova.compute.manager [req-7767e3aa-7a23-44c2-9e71-ae59d5423b69 req-0a2c6f97-b581-4190-84fa-12a537477ac9 service nova] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Received event network-vif-deleted-2c6c4fc0-06b2-415e-b994-692b79103ce0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1265.448695] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "faf810ae-7823-4115-a709-99dc7c480867" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.448695] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "faf810ae-7823-4115-a709-99dc7c480867" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.867218] env[68285]: INFO nova.compute.manager [-] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Took 1.47 seconds to deallocate network for instance. 
[ 1265.885282] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "d4818c98-8134-4426-bd35-b2339ed6abd4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.885903] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "d4818c98-8134-4426-bd35-b2339ed6abd4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.885903] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "d4818c98-8134-4426-bd35-b2339ed6abd4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.895371] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527a07c2-c2e9-b615-9a1e-8965ed7414fd, 'name': SearchDatastore_Task, 'duration_secs': 0.011668} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.897247] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1265.897247] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1265.897247] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.897247] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1265.899322] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1265.899322] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f383e22-779a-4c2a-ad19-37b7efede072 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.909672] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1265.909850] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1265.910851] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-432660be-34dd-49c3-83eb-0d28a359d849 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.920303] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1265.920303] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524f0051-39a0-49c1-9bf0-423b34db12d6" [ 1265.920303] env[68285]: _type = "Task" [ 1265.920303] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.930045] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524f0051-39a0-49c1-9bf0-423b34db12d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.955707] env[68285]: DEBUG nova.compute.manager [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1266.101498] env[68285]: DEBUG nova.compute.manager [req-a41f692c-6d5f-4b86-944c-c5bc6eef49de req-1b7a8af6-6f93-4a0d-887b-344a841a2927 service nova] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Received event network-vif-deleted-c34d5088-84d9-4ff5-97b4-906e64be2921 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1266.160555] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca3a816-b539-4104-bec1-40c0a759eb17 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.168479] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835e0d1b-1ae5-461e-a526-5b46d31b9f33 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.199054] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ce34b1-8858-40f7-b927-1c2b1701e3c1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.206758] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77afa8a-0bfb-45eb-9c2e-ad7ea6e08544 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.220145] env[68285]: DEBUG nova.compute.provider_tree [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1266.389864] env[68285]: DEBUG oslo_concurrency.lockutils [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.432834] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524f0051-39a0-49c1-9bf0-423b34db12d6, 'name': SearchDatastore_Task, 'duration_secs': 0.011463} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.433638] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9beb78f1-be36-48fe-ac6c-c4ca9ea8c66d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.439814] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1266.439814] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52283b4f-f3b5-c108-eb2f-51c64a70ae4e" [ 1266.439814] env[68285]: _type = "Task" [ 1266.439814] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.450360] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52283b4f-f3b5-c108-eb2f-51c64a70ae4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.486270] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.723217] env[68285]: DEBUG nova.scheduler.client.report [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1266.896018] env[68285]: INFO nova.compute.manager [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Swapping old allocation on dict_keys(['7bdf675d-15ae-4a4b-9c03-79d8c773b76b']) held by migration 0f8618b8-03e2-48c3-886c-a1c9bc490e78 for instance [ 1266.923503] env[68285]: DEBUG nova.scheduler.client.report [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Overwriting current allocation {'allocations': {'7bdf675d-15ae-4a4b-9c03-79d8c773b76b': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 154}}, 'project_id': '7106da1f6bcb4d0cb3dcad984b3adb33', 'user_id': 'fee422406a774be7830837baa9743f0e', 'consumer_generation': 1} on consumer 5abddda1-9bf7-4039-81c7-8622f43cc72e {{(pid=68285) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1266.932867] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1266.933072] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1266.933319] env[68285]: DEBUG 
nova.network.neutron [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1266.955166] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52283b4f-f3b5-c108-eb2f-51c64a70ae4e, 'name': SearchDatastore_Task, 'duration_secs': 0.01066} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.955497] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1266.955684] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 852ab501-00a6-442b-804a-1bbf49a2be8c/852ab501-00a6-442b-804a-1bbf49a2be8c.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1266.955939] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fddb4671-f5f3-4674-a26c-e2ffba36873f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.963600] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1266.963600] env[68285]: value = "task-2892428" [ 1266.963600] env[68285]: _type = "Task" [ 1266.963600] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.975133] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892428, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.032053] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.032053] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1267.037198] env[68285]: DEBUG nova.network.neutron [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1267.232169] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.066s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.232796] env[68285]: DEBUG nova.compute.manager [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1267.236073] env[68285]: DEBUG oslo_concurrency.lockutils [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.634s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1267.236285] env[68285]: DEBUG nova.objects.instance [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lazy-loading 'resources' on Instance uuid 68aee959-4168-43a7-a8d1-e6e126a52da5 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1267.474670] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892428, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462686} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.474948] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 852ab501-00a6-442b-804a-1bbf49a2be8c/852ab501-00a6-442b-804a-1bbf49a2be8c.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1267.475194] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1267.475451] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9898121-739e-41ae-a07f-34dfd955911f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.483358] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1267.483358] env[68285]: value = "task-2892429" [ 1267.483358] env[68285]: _type = "Task" [ 1267.483358] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.494506] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892429, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.739113] env[68285]: DEBUG nova.compute.utils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1267.740535] env[68285]: DEBUG nova.compute.manager [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1267.740704] env[68285]: DEBUG nova.network.neutron [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1267.802076] env[68285]: DEBUG nova.policy [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32693cebfff14740987c0b5129abca90', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e29d4b330861437386054127da2a6872', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1267.850056] env[68285]: DEBUG nova.network.neutron [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating instance_info_cache with network_info: [{"id": "d00eb1aa-97af-4a18-9582-416989e71604", "address": "fa:16:3e:0f:89:60", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd00eb1aa-97", "ovs_interfaceid": "d00eb1aa-97af-4a18-9582-416989e71604", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1267.973935] env[68285]: DEBUG nova.network.neutron [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance_info_cache with network_info: [{"id": "10900535-c864-4616-a243-0798b3cdb70a", "address": "fa:16:3e:49:12:6b", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", 
"version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10900535-c8", "ovs_interfaceid": "10900535-c864-4616-a243-0798b3cdb70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.000891] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892429, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069666} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.001224] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1268.002107] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a128133f-7976-49fb-87e5-3b4158d00693 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.027671] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 852ab501-00a6-442b-804a-1bbf49a2be8c/852ab501-00a6-442b-804a-1bbf49a2be8c.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1268.030728] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fcadfc7-f3b7-40d2-9309-24b28292b4d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.060449] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1268.060449] env[68285]: value = "task-2892430" [ 1268.060449] env[68285]: _type = "Task" [ 1268.060449] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.066943] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affaa3a6-470b-43a3-a857-b98bc44a2a5a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.079608] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d1054f-1341-4c34-be87-5abc8640412e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.084396] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892430, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.114536] env[68285]: DEBUG nova.network.neutron [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Successfully created port: c5dfeee8-308e-441b-8f3c-84d8c2738b4e {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1268.117542] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673e43cb-d6aa-40a6-960d-217453581b51 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.125900] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a9f5db-8f3b-4a6f-97f5-e67caa028922 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.140572] env[68285]: DEBUG nova.compute.provider_tree [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.246145] env[68285]: DEBUG nova.compute.manager [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1268.355363] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1268.478144] env[68285]: DEBUG oslo_concurrency.lockutils [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "refresh_cache-5abddda1-9bf7-4039-81c7-8622f43cc72e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1268.479165] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40069f0-63af-42e0-b1ca-04f8f13f3eda {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.488890] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9a958c-ae8b-4ac5-a0c7-d4cd8391320d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.579656] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892430, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.648281] env[68285]: DEBUG nova.scheduler.client.report [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1268.883212] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29acea3-a21a-4be7-a62e-c9fc07314655 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.903848] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15db2747-b864-44d3-a283-13f965bf6b7a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.913160] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating instance 'd4818c98-8134-4426-bd35-b2339ed6abd4' progress to 83 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1269.076804] env[68285]: DEBUG oslo_vmware.api [None 
req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892430, 'name': ReconfigVM_Task, 'duration_secs': 0.717558} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.076804] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 852ab501-00a6-442b-804a-1bbf49a2be8c/852ab501-00a6-442b-804a-1bbf49a2be8c.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1269.078046] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-00ca5513-fa31-4f19-9cbc-fbe83010f9c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.088034] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1269.088034] env[68285]: value = "task-2892431" [ 1269.088034] env[68285]: _type = "Task" [ 1269.088034] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.098811] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892431, 'name': Rename_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.152147] env[68285]: DEBUG oslo_concurrency.lockutils [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.916s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.156017] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.490s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.156017] env[68285]: DEBUG nova.objects.instance [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lazy-loading 'resources' on Instance uuid a4fc942a-03e7-4415-bd95-f1f0e1344a69 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1269.179020] env[68285]: INFO nova.scheduler.client.report [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Deleted allocations for instance 68aee959-4168-43a7-a8d1-e6e126a52da5 [ 1269.260643] env[68285]: DEBUG nova.compute.manager [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1269.301168] env[68285]: DEBUG nova.virt.hardware [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1269.301168] env[68285]: DEBUG nova.virt.hardware [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1269.301168] env[68285]: DEBUG nova.virt.hardware [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1269.301168] env[68285]: DEBUG nova.virt.hardware [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1269.301168] env[68285]: DEBUG nova.virt.hardware [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1269.301487] env[68285]: DEBUG nova.virt.hardware [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1269.301487] env[68285]: DEBUG nova.virt.hardware [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1269.301735] env[68285]: DEBUG nova.virt.hardware [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1269.301874] env[68285]: DEBUG nova.virt.hardware [None 
req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1269.302078] env[68285]: DEBUG nova.virt.hardware [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1269.302266] env[68285]: DEBUG nova.virt.hardware [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1269.303568] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6f46fc-7b12-48d3-b82e-5e7b785b963f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.313107] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5688be5-097b-45a6-9587-7c2cec3065ed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.419473] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1269.419820] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22849725-77a0-45a2-8bb5-49e16bd8e54c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.428398] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1269.428398] env[68285]: value = "task-2892432" [ 1269.428398] env[68285]: _type = "Task" [ 1269.428398] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.437240] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892432, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.591207] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1269.591871] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b461425a-f50e-4ab8-ad5c-64eb3273d58b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.606727] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892431, 'name': Rename_Task, 'duration_secs': 0.227176} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.608140] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1269.608460] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1269.608460] env[68285]: value = "task-2892433" [ 1269.608460] env[68285]: _type = "Task" [ 1269.608460] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.608646] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d12d8052-f98a-4df3-b018-eadaef376b82 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.619966] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892433, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.621708] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1269.621708] env[68285]: value = "task-2892434" [ 1269.621708] env[68285]: _type = "Task" [ 1269.621708] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.630211] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892434, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.691286] env[68285]: DEBUG oslo_concurrency.lockutils [None req-53508f23-147b-424b-a9c8-0d450d1fca8f tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "68aee959-4168-43a7-a8d1-e6e126a52da5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.747s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.945793] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892432, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.979062] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40bea4fc-05cb-4e0b-9d0f-fed2bde54dfb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.987946] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e59941b-6139-466a-93c8-c205ec511ce5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.022403] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7123b88-1570-4035-a7ad-7cfd9b5c0339 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.031308] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4989bf-9939-4241-ae1f-22189c766ba7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.046668] env[68285]: DEBUG nova.compute.provider_tree [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1270.125959] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892433, 'name': PowerOffVM_Task, 'duration_secs': 0.244293} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.130925] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1270.131884] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1270.134461] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1270.134461] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1270.134461] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1270.134461] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1270.134461] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1270.134461] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1270.134461] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 
tempest-ServerActionsTestOtherB-158934431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1270.134461] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1270.134461] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1270.134461] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1270.144878] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d26a0e0-ba5d-4323-a958-22f4ac161ff9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.170635] env[68285]: DEBUG nova.compute.manager [req-715080c8-dcd2-487a-bf9c-2ad3a9084cc3 req-44447107-9f9d-4167-9bc5-22158788e2f8 service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Received event network-vif-plugged-c5dfeee8-308e-441b-8f3c-84d8c2738b4e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1270.170898] env[68285]: DEBUG oslo_concurrency.lockutils [req-715080c8-dcd2-487a-bf9c-2ad3a9084cc3 req-44447107-9f9d-4167-9bc5-22158788e2f8 service nova] Acquiring lock "51bdaa10-0cf3-4052-9f5c-7d4dad565fd6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1270.171082] env[68285]: DEBUG oslo_concurrency.lockutils [req-715080c8-dcd2-487a-bf9c-2ad3a9084cc3 req-44447107-9f9d-4167-9bc5-22158788e2f8 service nova] Lock "51bdaa10-0cf3-4052-9f5c-7d4dad565fd6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1270.171248] env[68285]: DEBUG oslo_concurrency.lockutils [req-715080c8-dcd2-487a-bf9c-2ad3a9084cc3 req-44447107-9f9d-4167-9bc5-22158788e2f8 service nova] Lock "51bdaa10-0cf3-4052-9f5c-7d4dad565fd6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1270.171407] env[68285]: DEBUG nova.compute.manager [req-715080c8-dcd2-487a-bf9c-2ad3a9084cc3 req-44447107-9f9d-4167-9bc5-22158788e2f8 service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] No waiting events found dispatching network-vif-plugged-c5dfeee8-308e-441b-8f3c-84d8c2738b4e {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1270.171565] env[68285]: WARNING nova.compute.manager [req-715080c8-dcd2-487a-bf9c-2ad3a9084cc3 
req-44447107-9f9d-4167-9bc5-22158788e2f8 service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Received unexpected event network-vif-plugged-c5dfeee8-308e-441b-8f3c-84d8c2738b4e for instance with vm_state building and task_state spawning. [ 1270.178425] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892434, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.180026] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1270.180026] env[68285]: value = "task-2892438" [ 1270.180026] env[68285]: _type = "Task" [ 1270.180026] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.190731] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892438, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.218828] env[68285]: DEBUG nova.network.neutron [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Successfully updated port: c5dfeee8-308e-441b-8f3c-84d8c2738b4e {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1270.440391] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892432, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.550086] env[68285]: DEBUG nova.scheduler.client.report [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1270.638857] env[68285]: DEBUG oslo_vmware.api [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892434, 'name': PowerOnVM_Task, 'duration_secs': 0.698567} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.639291] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1270.639650] env[68285]: INFO nova.compute.manager [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Took 9.26 seconds to spawn the instance on the hypervisor. [ 1270.639940] env[68285]: DEBUG nova.compute.manager [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1270.640859] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461e1372-8397-41ad-8eab-9ad4ccfc62fd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.690554] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892438, 'name': ReconfigVM_Task, 'duration_secs': 0.184235} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.691720] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914e5c43-61e5-4129-9be4-adc655ea65dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.714024] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1270.714273] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1270.714467] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image limits 0:0:0 {{(pid=68285) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1270.714669] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1270.714819] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1270.714969] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1270.715192] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1270.715352] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1270.715518] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1270.715680] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1270.715852] env[68285]: DEBUG nova.virt.hardware [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1270.717393] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbb3c858-f209-4d85-886d-dd01175d7eb3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.720854] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Acquiring lock "refresh_cache-51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.721048] env[68285]: DEBUG oslo_concurrency.lockutils [None 
req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Acquired lock "refresh_cache-51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1270.721220] env[68285]: DEBUG nova.network.neutron [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1270.724306] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1270.724306] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]525d5a8d-fa9f-5fd0-d3fa-a2edb3dc6d71" [ 1270.724306] env[68285]: _type = "Task" [ 1270.724306] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.733964] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525d5a8d-fa9f-5fd0-d3fa-a2edb3dc6d71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.943978] env[68285]: DEBUG oslo_vmware.api [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892432, 'name': PowerOnVM_Task, 'duration_secs': 1.246472} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.944453] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1270.944609] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8663b14e-4846-45c2-b846-17498652bbf1 tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating instance 'd4818c98-8134-4426-bd35-b2339ed6abd4' progress to 100 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1271.055723] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.900s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1271.058494] env[68285]: DEBUG oslo_concurrency.lockutils [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.669s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1271.058589] env[68285]: DEBUG nova.objects.instance [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Lazy-loading 'resources' on Instance uuid bd3c9b84-794d-4302-bfb2-1181d5ad9552 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1271.083242] env[68285]: INFO nova.scheduler.client.report [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleted allocations for instance a4fc942a-03e7-4415-bd95-f1f0e1344a69 [ 1271.161474] env[68285]: INFO nova.compute.manager [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Took 18.11 seconds to build instance. [ 1271.239044] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]525d5a8d-fa9f-5fd0-d3fa-a2edb3dc6d71, 'name': SearchDatastore_Task, 'duration_secs': 0.011438} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.245073] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Reconfiguring VM instance instance-00000059 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1271.245904] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b57f04e2-99df-429b-9162-bd26ecc3601c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.267775] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1271.267775] env[68285]: value = "task-2892439" [ 1271.267775] env[68285]: _type = "Task" [ 1271.267775] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.278265] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892439, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.279388] env[68285]: DEBUG nova.network.neutron [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1271.531560] env[68285]: DEBUG nova.network.neutron [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Updating instance_info_cache with network_info: [{"id": "c5dfeee8-308e-441b-8f3c-84d8c2738b4e", "address": "fa:16:3e:1b:59:67", "network": {"id": "a28978b6-c957-4db1-a22f-b74387b6172e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-968847195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e29d4b330861437386054127da2a6872", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5dfeee8-30", "ovs_interfaceid": "c5dfeee8-308e-441b-8f3c-84d8c2738b4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.599033] env[68285]: DEBUG oslo_concurrency.lockutils [None req-ccd66deb-07fc-4005-9cc3-3e7cfcb89aca tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "a4fc942a-03e7-4415-bd95-f1f0e1344a69" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.527s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1271.667410] env[68285]: DEBUG oslo_concurrency.lockutils [None req-901588d2-4294-43ea-9051-a9c8207cf808 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.623s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1271.782732] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892439, 'name': ReconfigVM_Task, 'duration_secs': 0.225096} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.785154] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Reconfigured VM instance instance-00000059 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1271.786094] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027f193e-0083-4843-9044-fd3180947d1c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.811091] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 5abddda1-9bf7-4039-81c7-8622f43cc72e/5abddda1-9bf7-4039-81c7-8622f43cc72e.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1271.813719] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa8dee08-204c-4cbb-aca0-674be6ba7b25 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.838025] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1271.838025] env[68285]: value = "task-2892440" [ 1271.838025] env[68285]: _type = "Task" [ 1271.838025] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.840363] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f3fcf6-61a1-4c2b-81db-9025ae67030a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.846332] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892440, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.851873] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c78838d-db88-4d40-abd8-663546d2ca73 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.886916] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ef43da-c2a4-40bc-be32-4fbf65b4bb0d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.890207] env[68285]: DEBUG nova.compute.manager [req-a46a755e-fe8d-4004-8480-7c0f90259aaa req-305c5436-74a4-4d2e-b514-45c72bfa4bce service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Received event network-changed-f4b82f26-eff6-4869-af1c-0bc1a3a4d606 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1271.890388] env[68285]: DEBUG nova.compute.manager [req-a46a755e-fe8d-4004-8480-7c0f90259aaa req-305c5436-74a4-4d2e-b514-45c72bfa4bce service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Refreshing instance network info cache due to event network-changed-f4b82f26-eff6-4869-af1c-0bc1a3a4d606. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1271.890596] env[68285]: DEBUG oslo_concurrency.lockutils [req-a46a755e-fe8d-4004-8480-7c0f90259aaa req-305c5436-74a4-4d2e-b514-45c72bfa4bce service nova] Acquiring lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.890931] env[68285]: DEBUG oslo_concurrency.lockutils [req-a46a755e-fe8d-4004-8480-7c0f90259aaa req-305c5436-74a4-4d2e-b514-45c72bfa4bce service nova] Acquired lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.890931] env[68285]: DEBUG nova.network.neutron [req-a46a755e-fe8d-4004-8480-7c0f90259aaa req-305c5436-74a4-4d2e-b514-45c72bfa4bce service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Refreshing network info cache for port f4b82f26-eff6-4869-af1c-0bc1a3a4d606 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1271.898562] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d285f61d-8d4e-4878-ade6-3ab4ca9fefb7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.914068] env[68285]: DEBUG nova.compute.provider_tree [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1272.034855] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Releasing lock "refresh_cache-51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1272.035310] env[68285]: DEBUG nova.compute.manager [None req-561718f6-0457-493b-bf6d-33c8afda0d44 
tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Instance network_info: |[{"id": "c5dfeee8-308e-441b-8f3c-84d8c2738b4e", "address": "fa:16:3e:1b:59:67", "network": {"id": "a28978b6-c957-4db1-a22f-b74387b6172e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-968847195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e29d4b330861437386054127da2a6872", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5dfeee8-30", "ovs_interfaceid": "c5dfeee8-308e-441b-8f3c-84d8c2738b4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1272.035873] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:59:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8ee8640-3787-4c27-9581-962ddb2be7e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5dfeee8-308e-441b-8f3c-84d8c2738b4e', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1272.045284] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Creating folder: Project (e29d4b330861437386054127da2a6872). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1272.045663] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4ed43da-7f40-4fd9-b3ec-0d68cec16b4a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.060408] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Created folder: Project (e29d4b330861437386054127da2a6872) in parent group-v580775. [ 1272.060631] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Creating folder: Instances. Parent ref: group-v581073. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1272.060975] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-95f11695-eb0c-496f-bc99-23a32d437b65 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.072132] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Created folder: Instances in parent group-v581073. [ 1272.072412] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1272.072632] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1272.072923] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d2b12e0-93bc-4c56-9dc1-49f322053e86 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.095561] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1272.095561] env[68285]: value = "task-2892443" [ 1272.095561] env[68285]: _type = "Task" [ 1272.095561] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.104721] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892443, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.207117] env[68285]: DEBUG nova.compute.manager [req-69196df6-f81e-4fb0-acec-b1f0a5942be9 req-46439ef4-ac3d-4ad9-a802-8885a1f99e8c service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Received event network-changed-c5dfeee8-308e-441b-8f3c-84d8c2738b4e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1272.207117] env[68285]: DEBUG nova.compute.manager [req-69196df6-f81e-4fb0-acec-b1f0a5942be9 req-46439ef4-ac3d-4ad9-a802-8885a1f99e8c service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Refreshing instance network info cache due to event network-changed-c5dfeee8-308e-441b-8f3c-84d8c2738b4e. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1272.207117] env[68285]: DEBUG oslo_concurrency.lockutils [req-69196df6-f81e-4fb0-acec-b1f0a5942be9 req-46439ef4-ac3d-4ad9-a802-8885a1f99e8c service nova] Acquiring lock "refresh_cache-51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.207117] env[68285]: DEBUG oslo_concurrency.lockutils [req-69196df6-f81e-4fb0-acec-b1f0a5942be9 req-46439ef4-ac3d-4ad9-a802-8885a1f99e8c service nova] Acquired lock "refresh_cache-51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1272.207117] env[68285]: DEBUG nova.network.neutron [req-69196df6-f81e-4fb0-acec-b1f0a5942be9 req-46439ef4-ac3d-4ad9-a802-8885a1f99e8c service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Refreshing network info cache for port c5dfeee8-308e-441b-8f3c-84d8c2738b4e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1272.292034] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1272.292357] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.345817] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892440, 'name': ReconfigVM_Task, 'duration_secs': 0.321247} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.346108] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 5abddda1-9bf7-4039-81c7-8622f43cc72e/5abddda1-9bf7-4039-81c7-8622f43cc72e.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1272.347058] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad11032-5799-456f-808f-e4d54bd16db2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.375496] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39bfa6a-7a38-42b7-9758-4eaeb1d224f9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.406283] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68e08fd-c7bb-41e0-934a-33a0aba7ef10 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.436010] env[68285]: DEBUG nova.scheduler.client.report [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1272.437113] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52695186-5140-437a-992f-a5a6e6c19a76 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.449701] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1272.449976] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-994f361d-0818-4b85-904b-91d097588f95 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.459125] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1272.459125] env[68285]: value = "task-2892444" [ 1272.459125] env[68285]: _type = "Task" [ 1272.459125] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.473432] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892444, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.609230] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892443, 'name': CreateVM_Task, 'duration_secs': 0.363338} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.612287] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1272.612982] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.613176] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1272.613456] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1272.614180] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42bf7a2c-dae4-4671-b410-f0c8d846e85f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.622324] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Waiting for the task: (returnval){ [ 1272.622324] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5290062a-a164-2d40-28a0-f41247e5080a" [ 1272.622324] env[68285]: _type = "Task" [ 1272.622324] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.631571] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5290062a-a164-2d40-28a0-f41247e5080a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.688390] env[68285]: DEBUG nova.network.neutron [req-a46a755e-fe8d-4004-8480-7c0f90259aaa req-305c5436-74a4-4d2e-b514-45c72bfa4bce service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Updated VIF entry in instance network info cache for port f4b82f26-eff6-4869-af1c-0bc1a3a4d606. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1272.688839] env[68285]: DEBUG nova.network.neutron [req-a46a755e-fe8d-4004-8480-7c0f90259aaa req-305c5436-74a4-4d2e-b514-45c72bfa4bce service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Updating instance_info_cache with network_info: [{"id": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "address": "fa:16:3e:36:84:f9", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4b82f26-ef", "ovs_interfaceid": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.718218] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "8a649b1e-d007-4032-a46c-b479365e5289" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1272.718467] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "8a649b1e-d007-4032-a46c-b479365e5289" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.718689] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "8a649b1e-d007-4032-a46c-b479365e5289-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1272.718874] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 
tempest-ServersTestJSON-16285486-project-member] Lock "8a649b1e-d007-4032-a46c-b479365e5289-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.719056] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "8a649b1e-d007-4032-a46c-b479365e5289-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.720961] env[68285]: INFO nova.compute.manager [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Terminating instance [ 1272.795281] env[68285]: DEBUG nova.compute.manager [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1272.943997] env[68285]: DEBUG oslo_concurrency.lockutils [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.886s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.949557] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.461s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.949557] env[68285]: INFO nova.compute.claims [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1272.975052] env[68285]: DEBUG oslo_vmware.api [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892444, 'name': PowerOnVM_Task, 'duration_secs': 0.407022} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.975210] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1272.987458] env[68285]: INFO nova.scheduler.client.report [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Deleted allocations for instance bd3c9b84-794d-4302-bfb2-1181d5ad9552 [ 1273.068470] env[68285]: DEBUG nova.network.neutron [req-69196df6-f81e-4fb0-acec-b1f0a5942be9 req-46439ef4-ac3d-4ad9-a802-8885a1f99e8c service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Updated VIF entry in instance network info cache for port c5dfeee8-308e-441b-8f3c-84d8c2738b4e. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1273.068836] env[68285]: DEBUG nova.network.neutron [req-69196df6-f81e-4fb0-acec-b1f0a5942be9 req-46439ef4-ac3d-4ad9-a802-8885a1f99e8c service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Updating instance_info_cache with network_info: [{"id": "c5dfeee8-308e-441b-8f3c-84d8c2738b4e", "address": "fa:16:3e:1b:59:67", "network": {"id": "a28978b6-c957-4db1-a22f-b74387b6172e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-968847195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e29d4b330861437386054127da2a6872", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5dfeee8-30", "ovs_interfaceid": "c5dfeee8-308e-441b-8f3c-84d8c2738b4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1273.133284] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5290062a-a164-2d40-28a0-f41247e5080a, 'name': SearchDatastore_Task, 'duration_secs': 0.01138} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.133708] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1273.134035] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1273.134348] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.134552] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1273.134809] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1273.135175] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a675900-f662-4c9c-bc73-38fa4d2697a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.156959] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1273.157236] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1273.158310] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e77859b9-a000-4819-b6ec-064d780cc37f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.166182] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Waiting for the task: (returnval){ [ 1273.166182] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]528dac83-1c6d-2818-dd8b-60c9b795147b" [ 1273.166182] env[68285]: _type = "Task" [ 1273.166182] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.175077] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528dac83-1c6d-2818-dd8b-60c9b795147b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.191902] env[68285]: DEBUG oslo_concurrency.lockutils [req-a46a755e-fe8d-4004-8480-7c0f90259aaa req-305c5436-74a4-4d2e-b514-45c72bfa4bce service nova] Releasing lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1273.206729] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "d4818c98-8134-4426-bd35-b2339ed6abd4" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.206968] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "d4818c98-8134-4426-bd35-b2339ed6abd4" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.207181] env[68285]: DEBUG nova.compute.manager [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Going to confirm migration 7 {{(pid=68285) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1273.225016] env[68285]: DEBUG nova.compute.manager [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1273.225249] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1273.226156] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68665d63-5ca6-4b4a-99f9-7447b0e8fa06 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.235508] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1273.235753] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee6d25be-7690-4a65-b521-a4f4ab0bb8f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.242224] env[68285]: DEBUG oslo_vmware.api [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1273.242224] env[68285]: value = "task-2892446" [ 1273.242224] env[68285]: _type = "Task" [ 1273.242224] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.251114] env[68285]: DEBUG oslo_vmware.api [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892446, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.315260] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.497042] env[68285]: DEBUG oslo_concurrency.lockutils [None req-58214cd2-bab7-4963-9772-dd9b291a5d60 tempest-ImagesNegativeTestJSON-1804712379 tempest-ImagesNegativeTestJSON-1804712379-project-member] Lock "bd3c9b84-794d-4302-bfb2-1181d5ad9552" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.758s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.571360] env[68285]: DEBUG oslo_concurrency.lockutils [req-69196df6-f81e-4fb0-acec-b1f0a5942be9 req-46439ef4-ac3d-4ad9-a802-8885a1f99e8c service nova] Releasing lock "refresh_cache-51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1273.680190] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528dac83-1c6d-2818-dd8b-60c9b795147b, 'name': SearchDatastore_Task, 'duration_secs': 0.020338} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.681226] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3dab6a81-b3f9-40e2-9785-2047ec281b31 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.688593] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Waiting for the task: (returnval){ [ 1273.688593] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5243ea59-927b-6ce0-2ff5-0212f178c869" [ 1273.688593] env[68285]: _type = "Task" [ 1273.688593] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.700204] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5243ea59-927b-6ce0-2ff5-0212f178c869, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.753883] env[68285]: DEBUG oslo_vmware.api [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892446, 'name': PowerOffVM_Task, 'duration_secs': 0.409703} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.754316] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1273.754586] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1273.754928] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5a6d2d6-c10b-45c0-bfc0-acf7318af080 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.845295] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.845772] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquired lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1273.845884] env[68285]: DEBUG nova.network.neutron [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1273.846147] env[68285]: DEBUG nova.objects.instance [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lazy-loading 'info_cache' on Instance uuid d4818c98-8134-4426-bd35-b2339ed6abd4 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1273.848886] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1273.849701] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1273.850040] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleting the datastore file [datastore1] 8a649b1e-d007-4032-a46c-b479365e5289 {{(pid=68285) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1273.850717] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd6dbc99-3476-4ef5-b8ba-f01ad6e9ac1b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.859291] env[68285]: DEBUG oslo_vmware.api [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1273.859291] env[68285]: value = "task-2892448" [ 1273.859291] env[68285]: _type = "Task" [ 1273.859291] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.870481] env[68285]: DEBUG oslo_vmware.api [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892448, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.034431] env[68285]: INFO nova.compute.manager [None req-dcf6ed3a-61fd-429b-a369-1880a36cc951 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance to original state: 'active' [ 1274.179975] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc26a3da-1e44-4f2d-a321-eee0a00d7b84 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.195242] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f64a842-ae07-4ac9-89de-d3c5754dc701 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.205429] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5243ea59-927b-6ce0-2ff5-0212f178c869, 'name': SearchDatastore_Task, 'duration_secs': 0.020958} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.230173] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1274.230531] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6/51bdaa10-0cf3-4052-9f5c-7d4dad565fd6.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1274.231389] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bbe7ee42-5a51-437d-ba90-e3a0097c1418 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.234009] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0db728-80f4-497a-878d-5eed00700959 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.244362] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87b286c-fab5-4d41-9d00-55e06f29214d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.249035] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Waiting for the task: (returnval){ [ 1274.249035] env[68285]: value = "task-2892449" [ 1274.249035] env[68285]: _type = "Task" [ 1274.249035] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.262139] env[68285]: DEBUG nova.compute.provider_tree [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1274.270234] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892449, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.372570] env[68285]: DEBUG oslo_vmware.api [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892448, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.759785] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892449, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.783197] env[68285]: ERROR nova.scheduler.client.report [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [req-aa9107f7-ffcf-424b-a746-4ffe864ebe39] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-aa9107f7-ffcf-424b-a746-4ffe864ebe39"}]} [ 1274.799166] env[68285]: DEBUG nova.scheduler.client.report [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1274.812467] env[68285]: DEBUG nova.scheduler.client.report [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1274.812725] env[68285]: DEBUG nova.compute.provider_tree [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1274.823491] env[68285]: DEBUG nova.scheduler.client.report [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1274.840608] env[68285]: DEBUG nova.scheduler.client.report [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1274.870705] env[68285]: DEBUG oslo_vmware.api [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892448, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.624075} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.870999] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1274.871208] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1274.871405] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1274.871557] env[68285]: INFO nova.compute.manager [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1274.871804] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1274.874751] env[68285]: DEBUG nova.compute.manager [-] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1274.875608] env[68285]: DEBUG nova.network.neutron [-] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1274.958700] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1274.958925] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.080682] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feeca80c-7ac8-4918-9643-0ae2085eaba1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.093472] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1bdee4-7320-49b7-9c67-a1585eed0d3e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.131285] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0fd860-b03a-4102-8369-ad4db5612589 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.140801] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b006e89-a09b-4eea-86aa-8180e0f92421 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.158653] env[68285]: DEBUG nova.compute.provider_tree [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1275.251695] env[68285]: DEBUG nova.network.neutron [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: 
d4818c98-8134-4426-bd35-b2339ed6abd4] Updating instance_info_cache with network_info: [{"id": "d00eb1aa-97af-4a18-9582-416989e71604", "address": "fa:16:3e:0f:89:60", "network": {"id": "32856f2d-088b-42f4-8ead-ce388babc208", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1769481995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9352aafac6e049feb8d74a91d1600224", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd00eb1aa-97", "ovs_interfaceid": "d00eb1aa-97af-4a18-9582-416989e71604", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.263557] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892449, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.332785] env[68285]: DEBUG nova.compute.manager [req-57dfc6e3-71d8-4779-81db-c98616a77c69 req-b4725be7-933b-40b0-9ba8-9e5ca98617df service nova] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Received event network-vif-deleted-b3858df6-8c44-4434-93e6-cc789ec6f4c3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1275.333076] env[68285]: INFO nova.compute.manager [req-57dfc6e3-71d8-4779-81db-c98616a77c69 req-b4725be7-933b-40b0-9ba8-9e5ca98617df service nova] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Neutron deleted interface b3858df6-8c44-4434-93e6-cc789ec6f4c3; detaching it from the instance and deleting it from the info cache [ 1275.333229] env[68285]: DEBUG nova.network.neutron [req-57dfc6e3-71d8-4779-81db-c98616a77c69 req-b4725be7-933b-40b0-9ba8-9e5ca98617df service nova] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.461576] env[68285]: DEBUG nova.compute.manager [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1275.700304] env[68285]: DEBUG nova.scheduler.client.report [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 157 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1275.700621] env[68285]: DEBUG nova.compute.provider_tree [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 157 to 158 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1275.700869] env[68285]: DEBUG nova.compute.provider_tree [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1275.704627] env[68285]: DEBUG nova.network.neutron [-] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.757847] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Releasing lock "refresh_cache-d4818c98-8134-4426-bd35-b2339ed6abd4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.758180] env[68285]: DEBUG nova.objects.instance [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lazy-loading 'migration_context' on Instance uuid d4818c98-8134-4426-bd35-b2339ed6abd4 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1275.766360] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892449, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.836801] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95fb0cb7-80f6-4a78-8c2c-c0bbf674505a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.847914] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f8177b-3e81-447c-975b-2f5b0e721c34 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.886798] env[68285]: DEBUG nova.compute.manager [req-57dfc6e3-71d8-4779-81db-c98616a77c69 req-b4725be7-933b-40b0-9ba8-9e5ca98617df service nova] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Detach interface failed, port_id=b3858df6-8c44-4434-93e6-cc789ec6f4c3, reason: Instance 8a649b1e-d007-4032-a46c-b479365e5289 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1275.920706] env[68285]: DEBUG oslo_concurrency.lockutils [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "5abddda1-9bf7-4039-81c7-8622f43cc72e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1275.920948] env[68285]: DEBUG oslo_concurrency.lockutils [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "5abddda1-9bf7-4039-81c7-8622f43cc72e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.921182] env[68285]: DEBUG oslo_concurrency.lockutils [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "5abddda1-9bf7-4039-81c7-8622f43cc72e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1275.921366] env[68285]: DEBUG oslo_concurrency.lockutils [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "5abddda1-9bf7-4039-81c7-8622f43cc72e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.921531] env[68285]: DEBUG oslo_concurrency.lockutils [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "5abddda1-9bf7-4039-81c7-8622f43cc72e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1275.923831] env[68285]: INFO nova.compute.manager [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 
tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Terminating instance [ 1275.985211] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.207913] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.261s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.208308] env[68285]: DEBUG nova.compute.manager [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1276.211114] env[68285]: INFO nova.compute.manager [-] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Took 1.34 seconds to deallocate network for instance. [ 1276.211629] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.897s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1276.213050] env[68285]: INFO nova.compute.claims [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1276.261424] env[68285]: DEBUG nova.objects.base [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1276.262531] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40629ce-be69-4c62-9e6e-e0bf3dc0b324 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.269538] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892449, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.541527} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.270188] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6/51bdaa10-0cf3-4052-9f5c-7d4dad565fd6.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1276.270430] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1276.270698] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e34dd3fe-b5ac-4f14-ad17-42c896e691f6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.286615] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-277f64eb-11f7-4a58-9be7-4d981ba0daf9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.292198] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Waiting for the task: (returnval){ [ 1276.292198] env[68285]: value = "task-2892451" [ 1276.292198] env[68285]: _type = "Task" [ 1276.292198] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.293130] env[68285]: DEBUG oslo_vmware.api [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1276.293130] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52fd89ef-a47e-624b-bee6-b996a626f6dc" [ 1276.293130] env[68285]: _type = "Task" [ 1276.293130] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.307105] env[68285]: DEBUG oslo_vmware.api [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52fd89ef-a47e-624b-bee6-b996a626f6dc, 'name': SearchDatastore_Task, 'duration_secs': 0.012273} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.310297] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.310550] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892451, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.364633] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "interface-8917672f-3b0d-42a1-b8b1-94ac47ce941a-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.365060] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-8917672f-3b0d-42a1-b8b1-94ac47ce941a-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1276.365538] env[68285]: DEBUG nova.objects.instance [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'flavor' on Instance uuid 8917672f-3b0d-42a1-b8b1-94ac47ce941a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1276.427290] env[68285]: DEBUG nova.compute.manager [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1276.427460] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1276.427727] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8098a9cc-a1bb-4477-998c-61240f0ad3aa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.436493] env[68285]: DEBUG oslo_vmware.api [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1276.436493] env[68285]: value = "task-2892452" [ 1276.436493] env[68285]: _type = "Task" [ 1276.436493] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.445062] env[68285]: DEBUG oslo_vmware.api [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892452, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.713414] env[68285]: DEBUG nova.compute.utils [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1276.714849] env[68285]: DEBUG nova.compute.manager [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1276.715267] env[68285]: DEBUG nova.network.neutron [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1276.721080] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.761257] env[68285]: DEBUG nova.policy [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb7f978e7fa64e88af5756fca97fce6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4471597d3345443aa28b97acd91847e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1276.803043] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892451, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.205099} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.803352] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1276.804081] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e13e83f-805c-41bc-ba4a-50013f6155b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.827544] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6/51bdaa10-0cf3-4052-9f5c-7d4dad565fd6.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1276.827824] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43f59ba1-8740-441c-b0fb-3952320f15d7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.848510] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Waiting for the task: (returnval){ [ 1276.848510] env[68285]: value = "task-2892453" [ 1276.848510] env[68285]: _type = "Task" [ 1276.848510] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.856607] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892453, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.936110] env[68285]: DEBUG nova.objects.instance [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'pci_requests' on Instance uuid 8917672f-3b0d-42a1-b8b1-94ac47ce941a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1276.947495] env[68285]: DEBUG oslo_vmware.api [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892452, 'name': PowerOffVM_Task, 'duration_secs': 0.231418} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.948456] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1276.948701] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Volume detach. Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1276.948918] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581046', 'volume_id': 'ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa', 'name': 'volume-ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '5abddda1-9bf7-4039-81c7-8622f43cc72e', 'attached_at': '2025-03-10T15:59:09.000000', 'detached_at': '', 'volume_id': 'ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa', 'serial': 'ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1276.949656] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76e2f36-3f04-4d1f-b0a8-4841600792b5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.971417] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f13e394-bdcf-4a0d-b688-fe45d78f2a6b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.979668] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79c5337-922d-41d0-8d92-b66f9b321f0f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.004928] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d08882-7fb1-464b-8ed7-e0ea6e012735 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.021020] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] The volume has not been displaced from its original location: [datastore2] volume-ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa/volume-ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa.vmdk. No consolidation needed. 
{{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1277.026314] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Reconfiguring VM instance instance-00000059 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1277.026639] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce24b3ee-3557-4ff4-be96-b578baf46db0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.041118] env[68285]: DEBUG nova.network.neutron [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Successfully created port: c311cfc8-4f78-4068-8841-8aa0ce5243c2 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1277.049837] env[68285]: DEBUG oslo_vmware.api [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1277.049837] env[68285]: value = "task-2892454" [ 1277.049837] env[68285]: _type = "Task" [ 1277.049837] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.059213] env[68285]: DEBUG oslo_vmware.api [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892454, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.218265] env[68285]: DEBUG nova.compute.manager [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1277.361392] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892453, 'name': ReconfigVM_Task, 'duration_secs': 0.26759} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.364357] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6/51bdaa10-0cf3-4052-9f5c-7d4dad565fd6.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1277.365319] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-289afae0-d363-4b34-ba27-58345de580d2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.374909] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Waiting for the task: (returnval){ [ 1277.374909] env[68285]: value = "task-2892456" [ 1277.374909] env[68285]: _type = "Task" [ 1277.374909] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.389559] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892456, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.442570] env[68285]: DEBUG nova.objects.base [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Object Instance<8917672f-3b0d-42a1-b8b1-94ac47ce941a> lazy-loaded attributes: flavor,pci_requests {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1277.442782] env[68285]: DEBUG nova.network.neutron [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1277.450771] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77cd24e4-f5c5-4ed5-bccb-87c3566a5ff8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.459953] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a63107-6340-49eb-8048-62684864a1e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.493130] env[68285]: DEBUG nova.policy [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '494447cb560a41dd9a3118745ac60554', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75a6837bced940cdaf5743b8e94cce29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 
'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1277.495327] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c9d025-0aae-4d9b-9f93-008845c61119 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.503889] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d150db76-8208-4b6f-84fc-e1ad07f4ff56 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.519435] env[68285]: DEBUG nova.compute.provider_tree [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1277.560025] env[68285]: DEBUG oslo_vmware.api [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892454, 'name': ReconfigVM_Task, 'duration_secs': 0.220633} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.560302] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Reconfigured VM instance instance-00000059 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1277.565159] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb5ec5ca-d708-4c0f-8597-f9646d8d5c52 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.580689] env[68285]: DEBUG oslo_vmware.api [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1277.580689] env[68285]: value = "task-2892457" [ 1277.580689] env[68285]: _type = "Task" [ 1277.580689] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.588951] env[68285]: DEBUG oslo_vmware.api [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892457, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.723662] env[68285]: INFO nova.virt.block_device [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Booting with volume bb993b4e-ec19-499c-a196-764a30b67abe at /dev/sda [ 1277.761017] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae0df17c-9239-4df5-a7a4-e74b86c878d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.769687] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa6a9a7-db39-4eb0-aa8b-a39c74fd10ce {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.781056] env[68285]: DEBUG nova.network.neutron [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Successfully created port: 11b23fc2-86d2-4f1a-9430-8afb7438275a {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1277.805471] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf14eeef-401c-4d0c-9a23-37579379905c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.814553] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc25c3b-55ff-4fea-9cb4-c540b263c85f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.850243] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc59172a-7472-4930-a2ce-5dd2a238427a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.857212] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cdab67-4bab-4b7c-a1e5-1d304c93444d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.872629] env[68285]: DEBUG nova.virt.block_device [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating existing volume attachment record: 772459be-4b28-463d-af6b-506a146f25de {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1277.886991] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892456, 'name': Rename_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.023216] env[68285]: DEBUG nova.scheduler.client.report [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1278.091471] env[68285]: DEBUG oslo_vmware.api [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892457, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.392711] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892456, 'name': Rename_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.522349] env[68285]: DEBUG nova.network.neutron [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Successfully updated port: c311cfc8-4f78-4068-8841-8aa0ce5243c2 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1278.529353] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.317s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1278.529353] env[68285]: DEBUG nova.compute.manager [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1278.532123] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.547s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1278.534030] env[68285]: INFO nova.compute.claims [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1278.592715] env[68285]: DEBUG oslo_vmware.api [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892457, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.889051] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892456, 'name': Rename_Task, 'duration_secs': 1.183843} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.889398] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1278.889675] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f25b9bc9-29e7-43c2-96fc-7a8b8f078966 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.896390] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Waiting for the task: (returnval){ [ 1278.896390] env[68285]: value = "task-2892458" [ 1278.896390] env[68285]: _type = "Task" [ 1278.896390] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.903904] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892458, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.025279] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.026129] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1279.026129] env[68285]: DEBUG nova.network.neutron [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1279.039214] env[68285]: DEBUG nova.compute.utils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1279.042751] env[68285]: DEBUG nova.compute.manager [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1279.042940] env[68285]: DEBUG nova.network.neutron [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1279.080078] env[68285]: DEBUG nova.policy [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '080ca112b7534d1284942bdd41514e66', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '923c0329269c41159ae4469d358fe25f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1279.092604] env[68285]: DEBUG oslo_vmware.api [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892457, 'name': ReconfigVM_Task, 'duration_secs': 1.130058} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.092919] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581046', 'volume_id': 'ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa', 'name': 'volume-ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '5abddda1-9bf7-4039-81c7-8622f43cc72e', 'attached_at': '2025-03-10T15:59:09.000000', 'detached_at': '', 'volume_id': 'ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa', 'serial': 'ec4ef04a-d50d-4304-a9ba-f84bebf4f9fa'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1279.093257] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1279.094048] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd26e14f-28de-4cf1-afc2-fa1c4e669e51 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.101061] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1279.101301] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f80c45ac-0fe6-4274-a390-cabb4a48bb24 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.177051] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1279.177373] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1279.177634] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleting the datastore file [datastore1] 5abddda1-9bf7-4039-81c7-8622f43cc72e {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1279.177985] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e67ae9d4-ea00-4067-b456-42c9025e72c7 {{(pid=68285) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.188544] env[68285]: DEBUG oslo_vmware.api [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1279.188544] env[68285]: value = "task-2892460" [ 1279.188544] env[68285]: _type = "Task" [ 1279.188544] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.197113] env[68285]: DEBUG oslo_vmware.api [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892460, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.218090] env[68285]: DEBUG nova.network.neutron [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Successfully updated port: 11b23fc2-86d2-4f1a-9430-8afb7438275a {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1279.363873] env[68285]: DEBUG nova.network.neutron [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Successfully created port: e449375e-9811-46ce-83ca-faf0266e4837 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1279.407553] env[68285]: DEBUG oslo_vmware.api [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892458, 'name': PowerOnVM_Task, 'duration_secs': 0.495948} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.407553] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1279.407700] env[68285]: INFO nova.compute.manager [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Took 10.15 seconds to spawn the instance on the hypervisor. 
[ 1279.407811] env[68285]: DEBUG nova.compute.manager [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1279.408691] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a533cfe1-f490-4b52-8ec3-7ec53bf01646 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.549421] env[68285]: DEBUG nova.compute.manager [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1279.566687] env[68285]: DEBUG nova.network.neutron [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1279.702269] env[68285]: DEBUG oslo_vmware.api [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892460, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14947} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.705213] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1279.705431] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1279.705613] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1279.705787] env[68285]: INFO nova.compute.manager [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Took 3.28 seconds to destroy the instance on the hypervisor. [ 1279.706053] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1279.706529] env[68285]: DEBUG nova.compute.manager [-] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1279.706630] env[68285]: DEBUG nova.network.neutron [-] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1279.720672] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.720943] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1279.721204] env[68285]: DEBUG nova.network.neutron [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1279.766267] env[68285]: DEBUG nova.network.neutron [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance_info_cache with network_info: [{"id": "c311cfc8-4f78-4068-8841-8aa0ce5243c2", "address": "fa:16:3e:12:c5:35", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc311cfc8-4f", "ovs_interfaceid": "c311cfc8-4f78-4068-8841-8aa0ce5243c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.819817] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936b3eac-11d2-4b1b-be4a-58bf57c47f8b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1279.831399] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cdc9332-b99c-4f8a-bbd7-8b358223ab47 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.868982] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcd44a7-ca45-4191-b798-6ab9d8effe74 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.878447] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2b6d7e-15d3-4a9f-b615-7b7d552b9647 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.894829] env[68285]: DEBUG nova.compute.provider_tree [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1279.934186] env[68285]: INFO nova.compute.manager [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Took 20.64 seconds to build instance. [ 1279.955310] env[68285]: DEBUG nova.compute.manager [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1279.955843] env[68285]: DEBUG nova.virt.hardware [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1279.956063] env[68285]: DEBUG nova.virt.hardware [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1279.956224] env[68285]: DEBUG nova.virt.hardware [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1279.956406] env[68285]: DEBUG nova.virt.hardware [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1279.956559] env[68285]: DEBUG nova.virt.hardware [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1279.956713] env[68285]: DEBUG nova.virt.hardware [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1279.956919] env[68285]: DEBUG nova.virt.hardware [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1279.957189] env[68285]: DEBUG nova.virt.hardware [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1279.957379] env[68285]: DEBUG nova.virt.hardware [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Got 1 possible topologies 
{{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1279.957542] env[68285]: DEBUG nova.virt.hardware [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1279.957712] env[68285]: DEBUG nova.virt.hardware [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1279.958828] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a33dc2-c23d-4b9b-aa13-8bada43329dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.968531] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e020249-dcc8-4ad5-9f5e-4e3660a74e7f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.261115] env[68285]: WARNING nova.network.neutron [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d already exists in list: networks containing: ['c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d']. ignoring it [ 1280.268680] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1280.269033] env[68285]: DEBUG nova.compute.manager [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Instance network_info: |[{"id": "c311cfc8-4f78-4068-8841-8aa0ce5243c2", "address": "fa:16:3e:12:c5:35", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc311cfc8-4f", "ovs_interfaceid": "c311cfc8-4f78-4068-8841-8aa0ce5243c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1280.269377] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:c5:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c311cfc8-4f78-4068-8841-8aa0ce5243c2', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1280.277167] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1280.277402] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: faf810ae-7823-4115-a709-99dc7c480867] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1280.277654] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a90d0a4c-4b42-4051-88d7-d6b26277c223 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.300290] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1280.300290] env[68285]: value = "task-2892462" [ 1280.300290] env[68285]: _type = "Task" [ 1280.300290] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.311693] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892462, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.436126] env[68285]: DEBUG nova.scheduler.client.report [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 158 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1280.436421] env[68285]: DEBUG nova.compute.provider_tree [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 158 to 159 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1280.436601] env[68285]: DEBUG nova.compute.provider_tree [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1280.441471] env[68285]: DEBUG oslo_concurrency.lockutils [None req-561718f6-0457-493b-bf6d-33c8afda0d44 tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Lock "51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.160s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.556818] env[68285]: DEBUG nova.network.neutron [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Updating instance_info_cache with network_info: [{"id": "1f6dacae-76ce-408a-8e61-deddf144ba68", "address": "fa:16:3e:f2:71:1b", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f6dacae-76", "ovs_interfaceid": "1f6dacae-76ce-408a-8e61-deddf144ba68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "11b23fc2-86d2-4f1a-9430-8afb7438275a", "address": "fa:16:3e:df:3b:aa", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11b23fc2-86", "ovs_interfaceid": "11b23fc2-86d2-4f1a-9430-8afb7438275a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.559846] env[68285]: DEBUG nova.compute.manager [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1280.588019] env[68285]: DEBUG nova.virt.hardware [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1280.588299] env[68285]: DEBUG nova.virt.hardware [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1280.588456] env[68285]: DEBUG nova.virt.hardware [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1280.588638] env[68285]: DEBUG nova.virt.hardware [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1280.588786] env[68285]: DEBUG nova.virt.hardware [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1280.588931] env[68285]: DEBUG nova.virt.hardware [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1280.589161] env[68285]: DEBUG nova.virt.hardware [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1280.589324] env[68285]: DEBUG nova.virt.hardware [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1280.589489] env[68285]: DEBUG 
nova.virt.hardware [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1280.589650] env[68285]: DEBUG nova.virt.hardware [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1280.589823] env[68285]: DEBUG nova.virt.hardware [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1280.590951] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649a744f-bde1-4e90-acfd-78b602569d05 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.599260] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a331d9-4416-4a04-b9cf-0e60e53513c6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.679447] env[68285]: DEBUG nova.network.neutron [-] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.813233] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892462, 'name': CreateVM_Task, 'duration_secs': 0.379785} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.813428] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: faf810ae-7823-4115-a709-99dc7c480867] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1280.814098] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'boot_index': 0, 'disk_bus': None, 'guest_format': None, 'device_type': None, 'attachment_id': '772459be-4b28-463d-af6b-506a146f25de', 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581068', 'volume_id': 'bb993b4e-ec19-499c-a196-764a30b67abe', 'name': 'volume-bb993b4e-ec19-499c-a196-764a30b67abe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'faf810ae-7823-4115-a709-99dc7c480867', 'attached_at': '', 'detached_at': '', 'volume_id': 'bb993b4e-ec19-499c-a196-764a30b67abe', 'serial': 'bb993b4e-ec19-499c-a196-764a30b67abe'}, 'volume_type': None}], 'swap': None} {{(pid=68285) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1280.814323] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Root volume attach. Driver type: vmdk {{(pid=68285) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1280.815489] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca849bf-ceec-4707-bbd2-731d5ca4e230 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.834062] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26a03a9-34e8-4fee-be67-63d992a800b1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.840433] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed844c24-2956-48b9-a152-1ca36ff23178 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.846746] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-2df55f28-5110-4814-827d-79d945da4b02 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.853881] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1280.853881] env[68285]: value = "task-2892463" [ 1280.853881] env[68285]: _type = "Task" [ 1280.853881] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.861535] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892463, 'name': RelocateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.942275] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.942991] env[68285]: DEBUG nova.compute.manager [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1280.947021] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.636s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1281.064298] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1281.064980] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.065148] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1281.066031] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c53e4ba-f88b-4741-b87d-63517619afac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.084942] env[68285]: DEBUG nova.virt.hardware [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1281.085274] env[68285]: DEBUG nova.virt.hardware [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1281.085481] env[68285]: DEBUG nova.virt.hardware [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1281.085673] env[68285]: DEBUG nova.virt.hardware [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1281.085838] env[68285]: DEBUG nova.virt.hardware [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1281.085991] env[68285]: DEBUG nova.virt.hardware [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1281.086233] env[68285]: DEBUG nova.virt.hardware [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1281.086387] env[68285]: DEBUG nova.virt.hardware [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1281.086549] env[68285]: DEBUG nova.virt.hardware [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1281.086734] env[68285]: DEBUG nova.virt.hardware [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1281.086920] env[68285]: DEBUG nova.virt.hardware [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1281.093961] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Reconfiguring VM to attach interface {{(pid=68285) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1281.094445] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98385c92-5633-48df-a78e-0e37d4475b7a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.109305] env[68285]: DEBUG nova.network.neutron [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Successfully updated port: e449375e-9811-46ce-83ca-faf0266e4837 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1281.118813] env[68285]: DEBUG oslo_vmware.api [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1281.118813] env[68285]: value = "task-2892464" [ 1281.118813] env[68285]: _type = "Task" [ 1281.118813] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.132205] env[68285]: DEBUG oslo_vmware.api [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892464, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.182906] env[68285]: INFO nova.compute.manager [-] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Took 1.48 seconds to deallocate network for instance. [ 1281.368236] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892463, 'name': RelocateVM_Task} progress is 42%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.454286] env[68285]: DEBUG nova.compute.utils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1281.456323] env[68285]: DEBUG nova.compute.manager [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1281.456516] env[68285]: DEBUG nova.network.neutron [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1281.507510] env[68285]: DEBUG nova.policy [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41e116b3ac9d4c7386847a5559ea313c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43491d0bdffc49eaaad084f3124cffcb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1281.612193] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "refresh_cache-0d99fb99-977e-4edc-93d8-492d55fd68a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.612395] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired lock "refresh_cache-0d99fb99-977e-4edc-93d8-492d55fd68a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1281.612529] env[68285]: DEBUG nova.network.neutron [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1281.638500] env[68285]: DEBUG oslo_vmware.api [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892464, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.736244] env[68285]: INFO nova.compute.manager [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Took 0.55 seconds to detach 1 volumes for instance. 
[ 1281.761863] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf05bc5-3000-4160-88fe-53757e82d63f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.773758] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59900d6b-d97e-42a9-9dc7-fdf661a07029 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.815193] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a571d0-a433-467f-80b5-96ed77c20457 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.825644] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6d03df-7ed5-4a67-8ed7-ffa4ec8a9d5b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.842521] env[68285]: DEBUG nova.compute.provider_tree [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1281.867372] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892463, 'name': RelocateVM_Task} progress is 54%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.883270] env[68285]: DEBUG nova.network.neutron [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Successfully created port: 07d808df-d1b1-42f4-8853-e537f5b160e0 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1281.960609] env[68285]: DEBUG nova.compute.manager [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1282.133164] env[68285]: DEBUG oslo_vmware.api [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892464, 'name': ReconfigVM_Task, 'duration_secs': 0.76273} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.133711] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1282.134826] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Reconfigured VM to attach interface {{(pid=68285) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1282.150851] env[68285]: DEBUG nova.network.neutron [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1282.246771] env[68285]: DEBUG oslo_concurrency.lockutils [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1282.299124] env[68285]: DEBUG nova.network.neutron [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Updating instance_info_cache with network_info: [{"id": "e449375e-9811-46ce-83ca-faf0266e4837", "address": "fa:16:3e:f1:b7:04", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape449375e-98", "ovs_interfaceid": "e449375e-9811-46ce-83ca-faf0266e4837", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.346232] env[68285]: DEBUG nova.scheduler.client.report [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 
0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1282.368022] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892463, 'name': RelocateVM_Task} progress is 67%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.638764] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c0df0d75-fe1c-4276-8d14-14536d328833 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-8917672f-3b0d-42a1-b8b1-94ac47ce941a-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.274s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.803653] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Releasing lock "refresh_cache-0d99fb99-977e-4edc-93d8-492d55fd68a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1282.803653] env[68285]: DEBUG nova.compute.manager [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Instance network_info: |[{"id": "e449375e-9811-46ce-83ca-faf0266e4837", "address": "fa:16:3e:f1:b7:04", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape449375e-98", "ovs_interfaceid": "e449375e-9811-46ce-83ca-faf0266e4837", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1282.803932] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:b7:04', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e449375e-9811-46ce-83ca-faf0266e4837', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1282.811435] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1282.811666] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1282.812959] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4a93f0d-9b52-40f7-ad02-e76eef9fca4a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.834278] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1282.834278] env[68285]: value = "task-2892466" [ 1282.834278] env[68285]: _type = "Task" [ 1282.834278] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.843293] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892466, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.886104] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892463, 'name': RelocateVM_Task} progress is 82%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.973257] env[68285]: DEBUG nova.compute.manager [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1283.007489] env[68285]: DEBUG nova.virt.hardware [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1283.007752] env[68285]: DEBUG nova.virt.hardware [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1283.007980] env[68285]: DEBUG nova.virt.hardware [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1283.008225] env[68285]: DEBUG nova.virt.hardware [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1283.008415] env[68285]: DEBUG nova.virt.hardware [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1283.008588] env[68285]: DEBUG nova.virt.hardware [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1283.008901] env[68285]: DEBUG nova.virt.hardware [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1283.009144] env[68285]: DEBUG nova.virt.hardware [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1283.009344] env[68285]: DEBUG nova.virt.hardware [None 
req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1283.009541] env[68285]: DEBUG nova.virt.hardware [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1283.009753] env[68285]: DEBUG nova.virt.hardware [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1283.010808] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77bac8d6-a6f3-4b54-b383-a7e61e07ea52 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.020896] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c977283-e7ac-4c75-99f5-e7821d24e883 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.345155] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892466, 'name': CreateVM_Task, 'duration_secs': 0.465351} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.345347] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1283.346120] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.346294] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1283.346679] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1283.346892] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03226947-8354-4c62-a88b-5a8e2cfd9cf0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.354399] env[68285]: DEBUG oslo_vmware.api [None 
req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1283.354399] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e8e47e-0191-7663-5ee3-f1bc30ea3363" [ 1283.354399] env[68285]: _type = "Task" [ 1283.354399] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.360090] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.414s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1283.363056] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.642s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1283.363302] env[68285]: DEBUG nova.objects.instance [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lazy-loading 'resources' on Instance uuid 8a649b1e-d007-4032-a46c-b479365e5289 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1283.373987] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e8e47e-0191-7663-5ee3-f1bc30ea3363, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.379895] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892463, 'name': RelocateVM_Task} progress is 97%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.519725] env[68285]: DEBUG nova.network.neutron [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Successfully updated port: 07d808df-d1b1-42f4-8853-e537f5b160e0 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1283.866968] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e8e47e-0191-7663-5ee3-f1bc30ea3363, 'name': SearchDatastore_Task, 'duration_secs': 0.019885} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.869756] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1283.869756] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1283.869929] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.870059] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1283.871020] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1283.874705] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42955749-7edb-484e-8b90-fd4d4ad180db {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.885473] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892463, 'name': RelocateVM_Task} progress is 98%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.887378] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1283.887566] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1283.888301] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1931682-f6bc-48a5-9231-645c824d87e1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.895034] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1283.895034] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]520dec8e-b597-1f6e-1f98-cff4ca96201e" [ 1283.895034] env[68285]: _type = "Task" [ 1283.895034] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.903622] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520dec8e-b597-1f6e-1f98-cff4ca96201e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.932642] env[68285]: INFO nova.scheduler.client.report [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleted allocation for migration 0d716d09-ecc5-4817-b49d-530c6d2f7096 [ 1284.022604] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "refresh_cache-a1dc8c86-523f-4474-9fea-9ccf35a36b3f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1284.022914] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-a1dc8c86-523f-4474-9fea-9ccf35a36b3f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1284.022914] env[68285]: DEBUG nova.network.neutron [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1284.030524] env[68285]: DEBUG nova.compute.manager [req-2df91123-6a9e-4248-bc98-19cef896ec33 req-a163c155-7828-4ecc-a5ca-3450dcd80e44 service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Received event network-vif-plugged-c311cfc8-4f78-4068-8841-8aa0ce5243c2 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1284.031159] env[68285]: DEBUG oslo_concurrency.lockutils [req-2df91123-6a9e-4248-bc98-19cef896ec33 req-a163c155-7828-4ecc-a5ca-3450dcd80e44 service nova] Acquiring lock "faf810ae-7823-4115-a709-99dc7c480867-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.031159] env[68285]: DEBUG 
oslo_concurrency.lockutils [req-2df91123-6a9e-4248-bc98-19cef896ec33 req-a163c155-7828-4ecc-a5ca-3450dcd80e44 service nova] Lock "faf810ae-7823-4115-a709-99dc7c480867-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.031159] env[68285]: DEBUG oslo_concurrency.lockutils [req-2df91123-6a9e-4248-bc98-19cef896ec33 req-a163c155-7828-4ecc-a5ca-3450dcd80e44 service nova] Lock "faf810ae-7823-4115-a709-99dc7c480867-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.031303] env[68285]: DEBUG nova.compute.manager [req-2df91123-6a9e-4248-bc98-19cef896ec33 req-a163c155-7828-4ecc-a5ca-3450dcd80e44 service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] No waiting events found dispatching network-vif-plugged-c311cfc8-4f78-4068-8841-8aa0ce5243c2 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1284.031736] env[68285]: WARNING nova.compute.manager [req-2df91123-6a9e-4248-bc98-19cef896ec33 req-a163c155-7828-4ecc-a5ca-3450dcd80e44 service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Received unexpected event network-vif-plugged-c311cfc8-4f78-4068-8841-8aa0ce5243c2 for instance with vm_state building and task_state spawning. [ 1284.051205] env[68285]: DEBUG nova.compute.manager [req-8b23b094-92a4-471e-a47d-e588f8bb3616 req-64c01bf0-deec-4023-9454-0d0eb94dffab service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Received event network-vif-plugged-11b23fc2-86d2-4f1a-9430-8afb7438275a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1284.051577] env[68285]: DEBUG oslo_concurrency.lockutils [req-8b23b094-92a4-471e-a47d-e588f8bb3616 req-64c01bf0-deec-4023-9454-0d0eb94dffab service nova] Acquiring lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.051656] env[68285]: DEBUG oslo_concurrency.lockutils [req-8b23b094-92a4-471e-a47d-e588f8bb3616 req-64c01bf0-deec-4023-9454-0d0eb94dffab service nova] Lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.051777] env[68285]: DEBUG oslo_concurrency.lockutils [req-8b23b094-92a4-471e-a47d-e588f8bb3616 req-64c01bf0-deec-4023-9454-0d0eb94dffab service nova] Lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.051996] env[68285]: DEBUG nova.compute.manager [req-8b23b094-92a4-471e-a47d-e588f8bb3616 req-64c01bf0-deec-4023-9454-0d0eb94dffab service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] No waiting events found dispatching network-vif-plugged-11b23fc2-86d2-4f1a-9430-8afb7438275a {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1284.052213] env[68285]: WARNING nova.compute.manager 
[req-8b23b094-92a4-471e-a47d-e588f8bb3616 req-64c01bf0-deec-4023-9454-0d0eb94dffab service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Received unexpected event network-vif-plugged-11b23fc2-86d2-4f1a-9430-8afb7438275a for instance with vm_state active and task_state None. [ 1284.102621] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf19b74-3054-4dd9-8f84-1a816817a930 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.110701] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e638d552-56c8-4c42-b1f9-6c2461a1b4e3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.140748] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f600b6f-abb8-49d8-890b-5829ec0a7bb7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.148064] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bf93c7-e7ea-4a95-bc1a-ced582cd17a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.161197] env[68285]: DEBUG nova.compute.provider_tree [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1284.376636] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892463, 'name': RelocateVM_Task, 'duration_secs': 3.184513} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.377030] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Volume attach. 
Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1284.377309] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581068', 'volume_id': 'bb993b4e-ec19-499c-a196-764a30b67abe', 'name': 'volume-bb993b4e-ec19-499c-a196-764a30b67abe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'faf810ae-7823-4115-a709-99dc7c480867', 'attached_at': '', 'detached_at': '', 'volume_id': 'bb993b4e-ec19-499c-a196-764a30b67abe', 'serial': 'bb993b4e-ec19-499c-a196-764a30b67abe'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1284.378312] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bbc0578-77c7-46cd-8212-641cb711b714 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.398835] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317ceab0-de4a-4112-80ff-394976bff1f1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.406446] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520dec8e-b597-1f6e-1f98-cff4ca96201e, 'name': SearchDatastore_Task, 'duration_secs': 0.036483} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.424072] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] volume-bb993b4e-ec19-499c-a196-764a30b67abe/volume-bb993b4e-ec19-499c-a196-764a30b67abe.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1284.424337] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c8dbc4a-251c-46dd-9b7f-24f913f070f9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.426654] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27fbdbca-2a11-4c40-adda-a7120c2d60b4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.442788] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "d4818c98-8134-4426-bd35-b2339ed6abd4" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.236s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.447598] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1284.447598] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52bb12d2-5453-cd79-92d7-58d13b94663f" [ 1284.447598] env[68285]: _type = "Task" [ 1284.447598] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.452971] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1284.452971] env[68285]: value = "task-2892468" [ 1284.452971] env[68285]: _type = "Task" [ 1284.452971] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.460910] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bb12d2-5453-cd79-92d7-58d13b94663f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.466412] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892468, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.578612] env[68285]: DEBUG nova.network.neutron [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1284.687389] env[68285]: ERROR nova.scheduler.client.report [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [req-d3091be9-33c0-4ea9-aee1-427f3d1d54d3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d3091be9-33c0-4ea9-aee1-427f3d1d54d3"}]} [ 1284.704376] env[68285]: DEBUG nova.scheduler.client.report [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1284.718382] env[68285]: DEBUG nova.scheduler.client.report [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1284.718639] env[68285]: DEBUG nova.compute.provider_tree [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1284.730121] env[68285]: DEBUG nova.scheduler.client.report [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Refreshing aggregate associations for resource provider 
7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1284.750124] env[68285]: DEBUG nova.scheduler.client.report [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1284.809657] env[68285]: DEBUG nova.network.neutron [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Updating instance_info_cache with network_info: [{"id": "07d808df-d1b1-42f4-8853-e537f5b160e0", "address": "fa:16:3e:7e:68:6d", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07d808df-d1", "ovs_interfaceid": "07d808df-d1b1-42f4-8853-e537f5b160e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.838537] env[68285]: DEBUG oslo_concurrency.lockutils [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "interface-8917672f-3b0d-42a1-b8b1-94ac47ce941a-11b23fc2-86d2-4f1a-9430-8afb7438275a" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.838875] env[68285]: DEBUG oslo_concurrency.lockutils [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-8917672f-3b0d-42a1-b8b1-94ac47ce941a-11b23fc2-86d2-4f1a-9430-8afb7438275a" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.961512] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bb12d2-5453-cd79-92d7-58d13b94663f, 'name': 
SearchDatastore_Task, 'duration_secs': 0.014947} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.962224] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1284.962527] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 0d99fb99-977e-4edc-93d8-492d55fd68a7/0d99fb99-977e-4edc-93d8-492d55fd68a7.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1284.963197] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5686e73b-6b34-48e8-ac9c-f92e863ca25b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.969686] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ccd45f8-64dd-46f8-9a29-35fd0092c960 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.974284] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892468, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.981178] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26c92f7-ec7d-499f-b765-b03668bab05b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.984362] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1284.984362] env[68285]: value = "task-2892469" [ 1284.984362] env[68285]: _type = "Task" [ 1284.984362] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.015887] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6259bbdb-b767-4e9a-8880-db778d0fe155 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.021856] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892469, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.026948] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3eba16-8adf-4a3c-a6aa-a3b823c826a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.041061] env[68285]: DEBUG nova.compute.provider_tree [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1285.312824] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-a1dc8c86-523f-4474-9fea-9ccf35a36b3f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1285.312965] env[68285]: DEBUG nova.compute.manager [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Instance network_info: |[{"id": "07d808df-d1b1-42f4-8853-e537f5b160e0", "address": "fa:16:3e:7e:68:6d", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07d808df-d1", "ovs_interfaceid": "07d808df-d1b1-42f4-8853-e537f5b160e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1285.313751] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:68:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07d808df-d1b1-42f4-8853-e537f5b160e0', 
'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1285.321510] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1285.321766] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1285.322032] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a91690b6-3ca3-4b47-9ed7-f63ccc32c6a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.343851] env[68285]: DEBUG oslo_concurrency.lockutils [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.344051] env[68285]: DEBUG oslo_concurrency.lockutils [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1285.344309] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1285.344309] env[68285]: value = "task-2892470" [ 1285.344309] env[68285]: _type = "Task" [ 1285.344309] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.345178] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2894b26d-a3b1-4786-a7a1-02dafefca4be {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.374067] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb73897-19b2-49fa-a5ca-d7fed921512c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.376818] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892470, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.402894] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Reconfiguring VM to detach interface {{(pid=68285) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1285.403311] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c064b17-dac0-4045-be0f-ee1607e223db {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.425743] env[68285]: DEBUG oslo_vmware.api [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1285.425743] env[68285]: value = "task-2892471" [ 1285.425743] env[68285]: _type = "Task" [ 1285.425743] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.435433] env[68285]: DEBUG oslo_vmware.api [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892471, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.466476] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892468, 'name': ReconfigVM_Task, 'duration_secs': 0.568919} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.466781] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Reconfigured VM instance instance-0000006e to attach disk [datastore1] volume-bb993b4e-ec19-499c-a196-764a30b67abe/volume-bb993b4e-ec19-499c-a196-764a30b67abe.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1285.472410] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e530e57-cfce-4f01-a8d6-658342f90035 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.490176] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1285.490176] env[68285]: value = "task-2892472" [ 1285.490176] env[68285]: _type = "Task" [ 1285.490176] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.497076] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892469, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512605} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.497702] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 0d99fb99-977e-4edc-93d8-492d55fd68a7/0d99fb99-977e-4edc-93d8-492d55fd68a7.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1285.497967] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1285.498669] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9494a28c-0e49-43ab-a411-06fd0dddf6ea {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.503533] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892472, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.508213] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1285.508213] env[68285]: value = "task-2892473" [ 1285.508213] env[68285]: _type = "Task" [ 1285.508213] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.517783] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892473, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.574248] env[68285]: DEBUG nova.scheduler.client.report [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 160 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1285.574561] env[68285]: DEBUG nova.compute.provider_tree [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 160 to 161 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1285.574769] env[68285]: DEBUG nova.compute.provider_tree [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1285.737416] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "d4818c98-8134-4426-bd35-b2339ed6abd4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.737709] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "d4818c98-8134-4426-bd35-b2339ed6abd4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.737938] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "d4818c98-8134-4426-bd35-b2339ed6abd4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.738142] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 
tempest-DeleteServersTestJSON-1021365396-project-member] Lock "d4818c98-8134-4426-bd35-b2339ed6abd4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.738316] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "d4818c98-8134-4426-bd35-b2339ed6abd4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1285.740556] env[68285]: INFO nova.compute.manager [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Terminating instance [ 1285.858522] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892470, 'name': CreateVM_Task, 'duration_secs': 0.362902} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.858734] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1285.859468] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.859659] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1285.860053] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1285.860332] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00d2d1ea-29f1-4c27-abf2-bace1a36829e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.865502] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1285.865502] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e65fe2-5076-f063-8c23-5d3b054de6b8" [ 1285.865502] env[68285]: _type = "Task" [ 1285.865502] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.873384] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e65fe2-5076-f063-8c23-5d3b054de6b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.935422] env[68285]: DEBUG oslo_vmware.api [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892471, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.002764] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892472, 'name': ReconfigVM_Task, 'duration_secs': 0.131316} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.003141] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581068', 'volume_id': 'bb993b4e-ec19-499c-a196-764a30b67abe', 'name': 'volume-bb993b4e-ec19-499c-a196-764a30b67abe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'faf810ae-7823-4115-a709-99dc7c480867', 'attached_at': '', 'detached_at': '', 'volume_id': 'bb993b4e-ec19-499c-a196-764a30b67abe', 'serial': 'bb993b4e-ec19-499c-a196-764a30b67abe'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1286.003665] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fe6b2dec-a55f-4cea-9230-c3d957811a07 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.009846] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1286.009846] env[68285]: value = "task-2892475" [ 1286.009846] env[68285]: _type = "Task" [ 1286.009846] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.019472] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892473, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072732} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.022380] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1286.022648] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892475, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.023454] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2da18c7-e3c4-40a3-8b94-aa081a2bc9e1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.044271] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 0d99fb99-977e-4edc-93d8-492d55fd68a7/0d99fb99-977e-4edc-93d8-492d55fd68a7.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1286.044562] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-904f0081-1656-464c-b494-4a64f5f6f162 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.064228] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1286.064228] env[68285]: value = "task-2892476" [ 1286.064228] env[68285]: _type = "Task" [ 1286.064228] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.075962] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892476, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.078693] env[68285]: DEBUG nova.compute.manager [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Received event network-changed-c311cfc8-4f78-4068-8841-8aa0ce5243c2 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1286.078891] env[68285]: DEBUG nova.compute.manager [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Refreshing instance network info cache due to event network-changed-c311cfc8-4f78-4068-8841-8aa0ce5243c2. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1286.079125] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Acquiring lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.079274] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Acquired lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1286.079436] env[68285]: DEBUG nova.network.neutron [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Refreshing network info cache for port c311cfc8-4f78-4068-8841-8aa0ce5243c2 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1286.081257] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.718s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.083778] env[68285]: DEBUG oslo_concurrency.lockutils [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.837s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1286.084139] env[68285]: DEBUG oslo_concurrency.lockutils [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.099271] env[68285]: DEBUG nova.compute.manager [req-5585419b-0aa7-4d09-b652-20a093d93e47 req-ad01f76c-24d2-44de-b52a-80c2a6b2dce0 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Received event network-changed-11b23fc2-86d2-4f1a-9430-8afb7438275a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1286.099565] env[68285]: DEBUG nova.compute.manager [req-5585419b-0aa7-4d09-b652-20a093d93e47 req-ad01f76c-24d2-44de-b52a-80c2a6b2dce0 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Refreshing instance network info cache due to event network-changed-11b23fc2-86d2-4f1a-9430-8afb7438275a. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1286.099814] env[68285]: DEBUG oslo_concurrency.lockutils [req-5585419b-0aa7-4d09-b652-20a093d93e47 req-ad01f76c-24d2-44de-b52a-80c2a6b2dce0 service nova] Acquiring lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.099990] env[68285]: DEBUG oslo_concurrency.lockutils [req-5585419b-0aa7-4d09-b652-20a093d93e47 req-ad01f76c-24d2-44de-b52a-80c2a6b2dce0 service nova] Acquired lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1286.100139] env[68285]: DEBUG nova.network.neutron [req-5585419b-0aa7-4d09-b652-20a093d93e47 req-ad01f76c-24d2-44de-b52a-80c2a6b2dce0 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Refreshing network info cache for port 11b23fc2-86d2-4f1a-9430-8afb7438275a {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1286.109729] env[68285]: INFO nova.scheduler.client.report [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleted allocations for instance 8a649b1e-d007-4032-a46c-b479365e5289 [ 1286.116269] env[68285]: INFO nova.scheduler.client.report [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleted allocations for instance 5abddda1-9bf7-4039-81c7-8622f43cc72e [ 1286.244862] env[68285]: DEBUG nova.compute.manager [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1286.245116] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1286.246050] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62c980c-848c-428b-a678-d4ce0a12582f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.254255] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1286.254532] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-150d8d14-44b4-41e6-a841-a91952b5a252 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.260805] env[68285]: DEBUG oslo_vmware.api [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1286.260805] env[68285]: value = "task-2892477" [ 1286.260805] env[68285]: _type = "Task" [ 1286.260805] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.268712] env[68285]: DEBUG oslo_vmware.api [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892477, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.377257] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e65fe2-5076-f063-8c23-5d3b054de6b8, 'name': SearchDatastore_Task, 'duration_secs': 0.009196} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.377574] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1286.377809] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1286.378052] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.378204] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1286.378388] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1286.378677] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-870fcfcd-7f42-440f-a251-f8a8ee4f2174 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.388377] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1286.388705] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1286.389839] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0135633-9f34-4215-990c-98cd76c33ba0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.395429] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1286.395429] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52a5984c-f928-c41b-9129-7b4ee6c26c6d" [ 1286.395429] env[68285]: _type = "Task" [ 1286.395429] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.403400] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a5984c-f928-c41b-9129-7b4ee6c26c6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.437884] env[68285]: DEBUG oslo_vmware.api [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892471, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.522557] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892475, 'name': Rename_Task, 'duration_secs': 0.135282} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.522844] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1286.523126] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6235fc3-20e5-4aa7-9eb6-841602e6d39f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.529725] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1286.529725] env[68285]: value = "task-2892478" [ 1286.529725] env[68285]: _type = "Task" [ 1286.529725] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.537325] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892478, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.573070] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892476, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.620495] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0a61f1e2-40a6-436b-a344-a7c6f12771a8 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "8a649b1e-d007-4032-a46c-b479365e5289" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.902s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.625121] env[68285]: DEBUG oslo_concurrency.lockutils [None req-482efb75-5e66-46e6-bef8-9a433e6df9e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "5abddda1-9bf7-4039-81c7-8622f43cc72e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.704s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.772182] env[68285]: DEBUG oslo_vmware.api [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892477, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.789992] env[68285]: DEBUG nova.network.neutron [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updated VIF entry in instance network info cache for port c311cfc8-4f78-4068-8841-8aa0ce5243c2. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1286.790411] env[68285]: DEBUG nova.network.neutron [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance_info_cache with network_info: [{"id": "c311cfc8-4f78-4068-8841-8aa0ce5243c2", "address": "fa:16:3e:12:c5:35", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc311cfc8-4f", "ovs_interfaceid": "c311cfc8-4f78-4068-8841-8aa0ce5243c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1286.813710] env[68285]: DEBUG nova.network.neutron [req-5585419b-0aa7-4d09-b652-20a093d93e47 req-ad01f76c-24d2-44de-b52a-80c2a6b2dce0 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Updated VIF entry in instance network info cache for port 11b23fc2-86d2-4f1a-9430-8afb7438275a. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1286.814167] env[68285]: DEBUG nova.network.neutron [req-5585419b-0aa7-4d09-b652-20a093d93e47 req-ad01f76c-24d2-44de-b52a-80c2a6b2dce0 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Updating instance_info_cache with network_info: [{"id": "1f6dacae-76ce-408a-8e61-deddf144ba68", "address": "fa:16:3e:f2:71:1b", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f6dacae-76", "ovs_interfaceid": "1f6dacae-76ce-408a-8e61-deddf144ba68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "11b23fc2-86d2-4f1a-9430-8afb7438275a", "address": "fa:16:3e:df:3b:aa", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11b23fc2-86", "ovs_interfaceid": "11b23fc2-86d2-4f1a-9430-8afb7438275a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1286.906182] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a5984c-f928-c41b-9129-7b4ee6c26c6d, 'name': SearchDatastore_Task, 'duration_secs': 0.017354} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.906958] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4615dcb0-e01e-4481-8fe2-0517833018d6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.924105] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1286.924105] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52638b22-2e12-48c5-6a70-b7821942d90b" [ 1286.924105] env[68285]: _type = "Task" [ 1286.924105] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.936823] env[68285]: DEBUG oslo_vmware.api [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892471, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.939804] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52638b22-2e12-48c5-6a70-b7821942d90b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.040244] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892478, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.076929] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892476, 'name': ReconfigVM_Task, 'duration_secs': 0.875857} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.076929] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 0d99fb99-977e-4edc-93d8-492d55fd68a7/0d99fb99-977e-4edc-93d8-492d55fd68a7.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1287.076929] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0053386-fddd-4add-ae2a-388662976e62 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.082979] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1287.082979] env[68285]: value = "task-2892479" [ 1287.082979] env[68285]: _type = "Task" [ 1287.082979] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.091459] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892479, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.270862] env[68285]: DEBUG oslo_vmware.api [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892477, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.293966] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Releasing lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1287.294405] env[68285]: DEBUG nova.compute.manager [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Received event network-vif-plugged-e449375e-9811-46ce-83ca-faf0266e4837 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1287.294624] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Acquiring lock "0d99fb99-977e-4edc-93d8-492d55fd68a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1287.294919] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.294999] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.295193] env[68285]: DEBUG nova.compute.manager [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] No waiting events found dispatching network-vif-plugged-e449375e-9811-46ce-83ca-faf0266e4837 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1287.295360] env[68285]: WARNING nova.compute.manager [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Received unexpected event network-vif-plugged-e449375e-9811-46ce-83ca-faf0266e4837 for instance with vm_state building and task_state spawning. [ 1287.295528] env[68285]: DEBUG nova.compute.manager [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Received event network-changed-e449375e-9811-46ce-83ca-faf0266e4837 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1287.295681] env[68285]: DEBUG nova.compute.manager [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Refreshing instance network info cache due to event network-changed-e449375e-9811-46ce-83ca-faf0266e4837. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1287.295860] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Acquiring lock "refresh_cache-0d99fb99-977e-4edc-93d8-492d55fd68a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.295999] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Acquired lock "refresh_cache-0d99fb99-977e-4edc-93d8-492d55fd68a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1287.296177] env[68285]: DEBUG nova.network.neutron [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Refreshing network info cache for port e449375e-9811-46ce-83ca-faf0266e4837 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1287.316311] env[68285]: DEBUG oslo_concurrency.lockutils [req-5585419b-0aa7-4d09-b652-20a093d93e47 req-ad01f76c-24d2-44de-b52a-80c2a6b2dce0 service nova] Releasing lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1287.316553] env[68285]: DEBUG nova.compute.manager [req-5585419b-0aa7-4d09-b652-20a093d93e47 req-ad01f76c-24d2-44de-b52a-80c2a6b2dce0 service nova] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Received event network-vif-deleted-10900535-c864-4616-a243-0798b3cdb70a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1287.437668] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52638b22-2e12-48c5-6a70-b7821942d90b, 'name': SearchDatastore_Task, 'duration_secs': 0.033979} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.440875] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1287.441132] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] a1dc8c86-523f-4474-9fea-9ccf35a36b3f/a1dc8c86-523f-4474-9fea-9ccf35a36b3f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1287.441401] env[68285]: DEBUG oslo_vmware.api [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892471, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.441611] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-372424a9-7e53-4c03-8119-253dc87f2b7b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.448069] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1287.448069] env[68285]: value = "task-2892480" [ 1287.448069] env[68285]: _type = "Task" [ 1287.448069] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.455991] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892480, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.540360] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892478, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.594828] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892479, 'name': Rename_Task, 'duration_secs': 0.163238} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.595187] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1287.595480] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce38e04e-912c-4a96-bd22-4215138c4459 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.602102] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1287.602102] env[68285]: value = "task-2892481" [ 1287.602102] env[68285]: _type = "Task" [ 1287.602102] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.610386] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892481, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.764585] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "0329a534-0ba1-48df-aa9a-01d50bafab05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1287.764843] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "0329a534-0ba1-48df-aa9a-01d50bafab05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.776059] env[68285]: DEBUG oslo_vmware.api [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892477, 'name': PowerOffVM_Task, 'duration_secs': 1.170174} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.776937] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1287.777122] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1287.777388] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3e82c9e-d8a1-4012-8db3-289d0f801ee3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.874498] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1287.874752] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1287.874943] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleting the datastore file [datastore1] d4818c98-8134-4426-bd35-b2339ed6abd4 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1287.875230] env[68285]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f689b83-c899-4ecf-ba83-734a9fcec685 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.882739] env[68285]: DEBUG oslo_vmware.api [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for the task: (returnval){ [ 1287.882739] env[68285]: value = "task-2892483" [ 1287.882739] env[68285]: _type = "Task" [ 1287.882739] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.894841] env[68285]: DEBUG oslo_vmware.api [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892483, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.941713] env[68285]: DEBUG oslo_vmware.api [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892471, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.957429] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892480, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.029664] env[68285]: DEBUG nova.network.neutron [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Updated VIF entry in instance network info cache for port e449375e-9811-46ce-83ca-faf0266e4837. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1288.030075] env[68285]: DEBUG nova.network.neutron [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Updating instance_info_cache with network_info: [{"id": "e449375e-9811-46ce-83ca-faf0266e4837", "address": "fa:16:3e:f1:b7:04", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape449375e-98", "ovs_interfaceid": "e449375e-9811-46ce-83ca-faf0266e4837", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.041533] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892478, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.111716] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892481, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.271558] env[68285]: DEBUG nova.compute.manager [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1288.288832] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.289256] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.393122] env[68285]: DEBUG oslo_vmware.api [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Task: {'id': task-2892483, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256855} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.393412] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1288.393599] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1288.393777] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1288.393947] env[68285]: INFO nova.compute.manager [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Took 2.15 seconds to destroy the instance on the hypervisor. [ 1288.394245] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1288.394448] env[68285]: DEBUG nova.compute.manager [-] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1288.394541] env[68285]: DEBUG nova.network.neutron [-] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1288.442304] env[68285]: DEBUG oslo_vmware.api [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892471, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.458307] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892480, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513555} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.460388] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] a1dc8c86-523f-4474-9fea-9ccf35a36b3f/a1dc8c86-523f-4474-9fea-9ccf35a36b3f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1288.460602] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1288.460851] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06cdc441-45dc-40eb-a7e1-1e8cf3861416 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.469200] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1288.469200] env[68285]: value = "task-2892484" [ 1288.469200] env[68285]: _type = "Task" [ 1288.469200] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.476519] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892484, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.532391] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Releasing lock "refresh_cache-0d99fb99-977e-4edc-93d8-492d55fd68a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1288.532666] env[68285]: DEBUG nova.compute.manager [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Received event network-vif-plugged-07d808df-d1b1-42f4-8853-e537f5b160e0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1288.532911] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Acquiring lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.533329] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.533329] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.533513] env[68285]: DEBUG nova.compute.manager [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] No waiting events found dispatching network-vif-plugged-07d808df-d1b1-42f4-8853-e537f5b160e0 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1288.533864] env[68285]: WARNING nova.compute.manager [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Received unexpected event network-vif-plugged-07d808df-d1b1-42f4-8853-e537f5b160e0 for instance with vm_state building and task_state spawning. [ 1288.533963] env[68285]: DEBUG nova.compute.manager [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Received event network-changed-07d808df-d1b1-42f4-8853-e537f5b160e0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1288.534128] env[68285]: DEBUG nova.compute.manager [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Refreshing instance network info cache due to event network-changed-07d808df-d1b1-42f4-8853-e537f5b160e0. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1288.534347] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Acquiring lock "refresh_cache-a1dc8c86-523f-4474-9fea-9ccf35a36b3f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1288.534460] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Acquired lock "refresh_cache-a1dc8c86-523f-4474-9fea-9ccf35a36b3f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1288.534664] env[68285]: DEBUG nova.network.neutron [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Refreshing network info cache for port 07d808df-d1b1-42f4-8853-e537f5b160e0 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1288.545042] env[68285]: DEBUG oslo_vmware.api [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892478, 'name': PowerOnVM_Task, 'duration_secs': 1.663383} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.545951] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1288.546181] env[68285]: INFO nova.compute.manager [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Took 8.59 seconds to spawn the instance on the hypervisor. [ 1288.546862] env[68285]: DEBUG nova.compute.manager [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1288.547184] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c168392c-16a3-4d52-8454-6084d5cd142e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.613218] env[68285]: DEBUG oslo_vmware.api [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892481, 'name': PowerOnVM_Task, 'duration_secs': 0.82285} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.613218] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1288.613218] env[68285]: INFO nova.compute.manager [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Took 8.05 seconds to spawn the instance on the hypervisor. [ 1288.613349] env[68285]: DEBUG nova.compute.manager [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1288.614090] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc442c3-41f2-4b60-9f49-a334e56bb5fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.723487] env[68285]: DEBUG nova.compute.manager [req-a165199b-7f40-4bdc-afbb-63f0c69c8ca1 req-fc1a4f13-9ee0-402b-aa86-c3d0742f412f service nova] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Received event network-vif-deleted-d00eb1aa-97af-4a18-9582-416989e71604 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1288.723915] env[68285]: INFO nova.compute.manager [req-a165199b-7f40-4bdc-afbb-63f0c69c8ca1 req-fc1a4f13-9ee0-402b-aa86-c3d0742f412f service nova] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Neutron deleted interface d00eb1aa-97af-4a18-9582-416989e71604; detaching it from the instance and deleting it from the info cache [ 1288.724157] env[68285]: DEBUG nova.network.neutron [req-a165199b-7f40-4bdc-afbb-63f0c69c8ca1 req-fc1a4f13-9ee0-402b-aa86-c3d0742f412f service nova] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.792351] env[68285]: DEBUG nova.compute.manager [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1288.800206] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.800528] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.802400] env[68285]: INFO nova.compute.claims [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1288.944834] env[68285]: DEBUG oslo_vmware.api [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892471, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.979424] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892484, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071638} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.979805] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1288.981037] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b6a5db-8e27-4479-8295-7a280458f4a6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.003836] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] a1dc8c86-523f-4474-9fea-9ccf35a36b3f/a1dc8c86-523f-4474-9fea-9ccf35a36b3f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1289.004141] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d6cd7a5-7011-4350-93d9-f94d0799ae7d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.025781] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1289.025781] env[68285]: value = "task-2892485" [ 1289.025781] env[68285]: _type = "Task" [ 1289.025781] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.034888] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892485, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.066686] env[68285]: INFO nova.compute.manager [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Took 22.60 seconds to build instance. [ 1289.133366] env[68285]: INFO nova.compute.manager [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Took 15.83 seconds to build instance. 
[ 1289.153937] env[68285]: DEBUG nova.network.neutron [-] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.226890] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3da288cf-cd5b-458a-bf8e-460d841f3590 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.236559] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2266f71e-5819-49ef-a583-60af931e0a25 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.248441] env[68285]: DEBUG nova.network.neutron [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Updated VIF entry in instance network info cache for port 07d808df-d1b1-42f4-8853-e537f5b160e0. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1289.248802] env[68285]: DEBUG nova.network.neutron [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Updating instance_info_cache with network_info: [{"id": "07d808df-d1b1-42f4-8853-e537f5b160e0", "address": "fa:16:3e:7e:68:6d", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07d808df-d1", "ovs_interfaceid": "07d808df-d1b1-42f4-8853-e537f5b160e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.274364] env[68285]: DEBUG nova.compute.manager [req-a165199b-7f40-4bdc-afbb-63f0c69c8ca1 req-fc1a4f13-9ee0-402b-aa86-c3d0742f412f service nova] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Detach interface failed, port_id=d00eb1aa-97af-4a18-9582-416989e71604, reason: Instance d4818c98-8134-4426-bd35-b2339ed6abd4 could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1289.311853] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.444489] env[68285]: DEBUG oslo_vmware.api [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892471, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.541852] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892485, 'name': ReconfigVM_Task, 'duration_secs': 0.469184} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.542268] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Reconfigured VM instance instance-00000070 to attach disk [datastore1] a1dc8c86-523f-4474-9fea-9ccf35a36b3f/a1dc8c86-523f-4474-9fea-9ccf35a36b3f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1289.542798] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b45712b2-f8ea-4163-9f20-c6070a454821 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.549275] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1289.549275] env[68285]: value = "task-2892486" [ 1289.549275] env[68285]: _type = "Task" [ 1289.549275] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.558340] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892486, 'name': Rename_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.569057] env[68285]: DEBUG oslo_concurrency.lockutils [None req-45f02969-aea3-49e4-a929-f5fa0e19ecb4 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "faf810ae-7823-4115-a709-99dc7c480867" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.120s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1289.634505] env[68285]: DEBUG oslo_concurrency.lockutils [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Acquiring lock "8a598506-724f-48f6-91a8-1e02483e6aab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.634825] env[68285]: DEBUG oslo_concurrency.lockutils [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Lock "8a598506-724f-48f6-91a8-1e02483e6aab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1289.638388] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3a804733-7f81-425c-8a58-91e4bd400ba5 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.346s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1289.656184] env[68285]: INFO nova.compute.manager [-] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Took 1.26 seconds to deallocate network for instance. [ 1289.751879] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Releasing lock "refresh_cache-a1dc8c86-523f-4474-9fea-9ccf35a36b3f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1289.752276] env[68285]: DEBUG nova.compute.manager [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Received event network-changed-c5dfeee8-308e-441b-8f3c-84d8c2738b4e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1289.752530] env[68285]: DEBUG nova.compute.manager [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Refreshing instance network info cache due to event network-changed-c5dfeee8-308e-441b-8f3c-84d8c2738b4e. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1289.752757] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Acquiring lock "refresh_cache-51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.752942] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Acquired lock "refresh_cache-51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1289.753215] env[68285]: DEBUG nova.network.neutron [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Refreshing network info cache for port c5dfeee8-308e-441b-8f3c-84d8c2738b4e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1289.952760] env[68285]: DEBUG oslo_vmware.api [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892471, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.998521] env[68285]: DEBUG nova.compute.manager [req-5bf3399a-d944-4b2c-ac05-8fc04e2c0f7c req-9dcafcc1-b2c5-4d89-a7fe-2909cede1e1d service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Received event network-changed-e449375e-9811-46ce-83ca-faf0266e4837 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1289.998779] env[68285]: DEBUG nova.compute.manager [req-5bf3399a-d944-4b2c-ac05-8fc04e2c0f7c req-9dcafcc1-b2c5-4d89-a7fe-2909cede1e1d service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Refreshing instance network info cache due to event network-changed-e449375e-9811-46ce-83ca-faf0266e4837. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1289.998860] env[68285]: DEBUG oslo_concurrency.lockutils [req-5bf3399a-d944-4b2c-ac05-8fc04e2c0f7c req-9dcafcc1-b2c5-4d89-a7fe-2909cede1e1d service nova] Acquiring lock "refresh_cache-0d99fb99-977e-4edc-93d8-492d55fd68a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.999069] env[68285]: DEBUG oslo_concurrency.lockutils [req-5bf3399a-d944-4b2c-ac05-8fc04e2c0f7c req-9dcafcc1-b2c5-4d89-a7fe-2909cede1e1d service nova] Acquired lock "refresh_cache-0d99fb99-977e-4edc-93d8-492d55fd68a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1289.999186] env[68285]: DEBUG nova.network.neutron [req-5bf3399a-d944-4b2c-ac05-8fc04e2c0f7c req-9dcafcc1-b2c5-4d89-a7fe-2909cede1e1d service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Refreshing network info cache for port e449375e-9811-46ce-83ca-faf0266e4837 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1290.060744] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892486, 'name': Rename_Task, 'duration_secs': 0.136363} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.061157] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1290.061428] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c5d44e06-3217-42f7-93d4-c843fba2b96d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.070882] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1290.070882] env[68285]: value = "task-2892487" [ 1290.070882] env[68285]: _type = "Task" [ 1290.070882] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.078768] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892487, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.084084] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752013f9-b31d-4f71-9cf9-d9a02b91f688 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.090941] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23abcfd0-d863-4014-b8ad-2711d7be0a6d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.124486] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53415bf-ee42-4422-a096-b3fc795b8e35 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.132137] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d9984b-36dc-4ada-85b2-c4823cbc8044 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.146443] env[68285]: DEBUG nova.compute.manager [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1290.153989] env[68285]: DEBUG nova.compute.provider_tree [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1290.164764] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.445940] env[68285]: DEBUG oslo_vmware.api [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892471, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.542464] env[68285]: DEBUG nova.network.neutron [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Updated VIF entry in instance network info cache for port c5dfeee8-308e-441b-8f3c-84d8c2738b4e. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1290.542831] env[68285]: DEBUG nova.network.neutron [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Updating instance_info_cache with network_info: [{"id": "c5dfeee8-308e-441b-8f3c-84d8c2738b4e", "address": "fa:16:3e:1b:59:67", "network": {"id": "a28978b6-c957-4db1-a22f-b74387b6172e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-968847195-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e29d4b330861437386054127da2a6872", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5dfeee8-30", "ovs_interfaceid": "c5dfeee8-308e-441b-8f3c-84d8c2738b4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.583250] env[68285]: DEBUG oslo_vmware.api [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892487, 'name': PowerOnVM_Task, 'duration_secs': 0.476677} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.583513] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1290.583957] env[68285]: INFO nova.compute.manager [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Took 7.61 seconds to spawn the instance on the hypervisor. [ 1290.584134] env[68285]: DEBUG nova.compute.manager [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1290.585383] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92d7d99-607e-4adc-a91e-49d6175a1121 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.659744] env[68285]: DEBUG nova.scheduler.client.report [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1290.681086] env[68285]: DEBUG oslo_concurrency.lockutils [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.754961] env[68285]: DEBUG nova.compute.manager [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Received event network-changed-655ee17d-c9b8-43d9-b783-8c0a559a8300 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1290.755306] env[68285]: DEBUG nova.compute.manager [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Refreshing instance network info cache due to event network-changed-655ee17d-c9b8-43d9-b783-8c0a559a8300. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1290.755498] env[68285]: DEBUG oslo_concurrency.lockutils [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] Acquiring lock "refresh_cache-d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.755633] env[68285]: DEBUG oslo_concurrency.lockutils [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] Acquired lock "refresh_cache-d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1290.755859] env[68285]: DEBUG nova.network.neutron [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Refreshing network info cache for port 655ee17d-c9b8-43d9-b783-8c0a559a8300 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1290.760642] env[68285]: DEBUG nova.network.neutron [req-5bf3399a-d944-4b2c-ac05-8fc04e2c0f7c req-9dcafcc1-b2c5-4d89-a7fe-2909cede1e1d service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Updated VIF entry in instance network info cache for port e449375e-9811-46ce-83ca-faf0266e4837. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1290.761071] env[68285]: DEBUG nova.network.neutron [req-5bf3399a-d944-4b2c-ac05-8fc04e2c0f7c req-9dcafcc1-b2c5-4d89-a7fe-2909cede1e1d service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Updating instance_info_cache with network_info: [{"id": "e449375e-9811-46ce-83ca-faf0266e4837", "address": "fa:16:3e:f1:b7:04", "network": {"id": "bb60b055-2ac4-4c81-ad65-5edcbc6023f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-684439242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "923c0329269c41159ae4469d358fe25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape449375e-98", "ovs_interfaceid": "e449375e-9811-46ce-83ca-faf0266e4837", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.946393] env[68285]: DEBUG oslo_vmware.api [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892471, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.046500] env[68285]: DEBUG oslo_concurrency.lockutils [req-623094f2-4d46-4cb4-abe4-96f7c6d9e386 req-d2194b3e-4152-4072-b37f-ef8920653e32 service nova] Releasing lock "refresh_cache-51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1291.109175] env[68285]: INFO nova.compute.manager [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Took 15.14 seconds to build instance. [ 1291.168117] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1291.168117] env[68285]: DEBUG nova.compute.manager [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1291.170608] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.859s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1291.172720] env[68285]: INFO nova.compute.claims [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1291.263687] env[68285]: DEBUG oslo_concurrency.lockutils [req-5bf3399a-d944-4b2c-ac05-8fc04e2c0f7c req-9dcafcc1-b2c5-4d89-a7fe-2909cede1e1d service nova] Releasing lock "refresh_cache-0d99fb99-977e-4edc-93d8-492d55fd68a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1291.445952] env[68285]: DEBUG oslo_vmware.api [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892471, 'name': ReconfigVM_Task, 'duration_secs': 5.796731} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.445952] env[68285]: DEBUG oslo_concurrency.lockutils [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1291.445952] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Reconfigured VM to detach interface {{(pid=68285) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1291.487430] env[68285]: DEBUG nova.network.neutron [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Updated VIF entry in instance network info cache for port 655ee17d-c9b8-43d9-b783-8c0a559a8300. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1291.487782] env[68285]: DEBUG nova.network.neutron [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Updating instance_info_cache with network_info: [{"id": "655ee17d-c9b8-43d9-b783-8c0a559a8300", "address": "fa:16:3e:91:6c:b0", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap655ee17d-c9", "ovs_interfaceid": "655ee17d-c9b8-43d9-b783-8c0a559a8300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.611593] env[68285]: DEBUG oslo_concurrency.lockutils [None req-aed508f0-6afe-484d-b3c6-8b3dc7a3652e tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.652s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1291.677283] env[68285]: DEBUG nova.compute.utils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1291.681781] env[68285]: DEBUG 
nova.compute.manager [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1291.681951] env[68285]: DEBUG nova.network.neutron [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1291.716943] env[68285]: DEBUG nova.policy [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '184360cab7224b9eaef80dfe89d0208b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '288595d9298e43fa859bc6b68054aa08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1291.967018] env[68285]: DEBUG nova.compute.manager [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Stashing vm_state: active {{(pid=68285) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1291.993477] env[68285]: DEBUG nova.network.neutron [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Successfully created port: 3bf95754-e92f-4854-b0aa-78333d1e73ba {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1291.996161] env[68285]: DEBUG oslo_concurrency.lockutils [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] Releasing lock "refresh_cache-d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1291.997105] env[68285]: DEBUG nova.compute.manager [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Received event network-changed-c311cfc8-4f78-4068-8841-8aa0ce5243c2 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1291.997105] env[68285]: DEBUG nova.compute.manager [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Refreshing instance network info cache due to event network-changed-c311cfc8-4f78-4068-8841-8aa0ce5243c2. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1291.997105] env[68285]: DEBUG oslo_concurrency.lockutils [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] Acquiring lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.997105] env[68285]: DEBUG oslo_concurrency.lockutils [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] Acquired lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1291.997426] env[68285]: DEBUG nova.network.neutron [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Refreshing network info cache for port c311cfc8-4f78-4068-8841-8aa0ce5243c2 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1292.181186] env[68285]: DEBUG nova.compute.manager [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1292.405766] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c365bdc2-4084-4f70-873b-8e32a49d273c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.412920] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6c0122-4c08-48d7-af7b-df112f7e50a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.443272] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf2f067-d243-402f-9494-424bd56ed3a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.450832] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec31bbc-bac4-4ec6-9c6d-8b6c00b0e903 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.463878] env[68285]: DEBUG nova.compute.provider_tree [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1292.482585] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.670605] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 
tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.670874] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.671108] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.671300] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.671471] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.679235] env[68285]: INFO nova.compute.manager [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Terminating instance [ 1292.692431] env[68285]: DEBUG oslo_concurrency.lockutils [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.692680] env[68285]: DEBUG oslo_concurrency.lockutils [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1292.692864] env[68285]: DEBUG nova.network.neutron [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Building network info cache for instance {{(pid=68285) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1292.782871] env[68285]: DEBUG nova.compute.manager [req-26bcc737-679c-417d-afe6-1e1888f3b353 req-2ff7195c-18fe-477f-93a3-009623caa94b service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Received event network-changed-07d808df-d1b1-42f4-8853-e537f5b160e0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1292.785210] env[68285]: DEBUG nova.compute.manager [req-26bcc737-679c-417d-afe6-1e1888f3b353 req-2ff7195c-18fe-477f-93a3-009623caa94b service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Refreshing instance network info cache due to event network-changed-07d808df-d1b1-42f4-8853-e537f5b160e0. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1292.785390] env[68285]: DEBUG oslo_concurrency.lockutils [req-26bcc737-679c-417d-afe6-1e1888f3b353 req-2ff7195c-18fe-477f-93a3-009623caa94b service nova] Acquiring lock "refresh_cache-a1dc8c86-523f-4474-9fea-9ccf35a36b3f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.785749] env[68285]: DEBUG oslo_concurrency.lockutils [req-26bcc737-679c-417d-afe6-1e1888f3b353 req-2ff7195c-18fe-477f-93a3-009623caa94b service nova] Acquired lock "refresh_cache-a1dc8c86-523f-4474-9fea-9ccf35a36b3f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1292.786011] env[68285]: DEBUG nova.network.neutron [req-26bcc737-679c-417d-afe6-1e1888f3b353 req-2ff7195c-18fe-477f-93a3-009623caa94b service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Refreshing network info cache for port 07d808df-d1b1-42f4-8853-e537f5b160e0 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1292.865812] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.866028] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.966298] env[68285]: DEBUG nova.scheduler.client.report [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1292.996068] env[68285]: DEBUG nova.network.neutron [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updated VIF entry in instance network info cache for port c311cfc8-4f78-4068-8841-8aa0ce5243c2. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1292.996716] env[68285]: DEBUG nova.network.neutron [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance_info_cache with network_info: [{"id": "c311cfc8-4f78-4068-8841-8aa0ce5243c2", "address": "fa:16:3e:12:c5:35", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc311cfc8-4f", "ovs_interfaceid": "c311cfc8-4f78-4068-8841-8aa0ce5243c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.183234] env[68285]: DEBUG nova.compute.manager [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1293.183550] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1293.184488] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91e2950-397c-47c1-baff-df073220a8b8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.193705] env[68285]: DEBUG nova.compute.manager [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1293.195676] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1293.197775] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-626a3148-7dc7-422a-b5ce-e9a51034c73d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.206642] env[68285]: DEBUG oslo_vmware.api [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1293.206642] env[68285]: value = "task-2892488" [ 1293.206642] env[68285]: _type = "Task" [ 1293.206642] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.215625] env[68285]: DEBUG oslo_vmware.api [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892488, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.224602] env[68285]: DEBUG nova.virt.hardware [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1293.224955] env[68285]: DEBUG nova.virt.hardware [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1293.225252] env[68285]: DEBUG nova.virt.hardware [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1293.225514] env[68285]: DEBUG nova.virt.hardware [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1293.225680] env[68285]: DEBUG nova.virt.hardware [None 
req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1293.225840] env[68285]: DEBUG nova.virt.hardware [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1293.226120] env[68285]: DEBUG nova.virt.hardware [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1293.226402] env[68285]: DEBUG nova.virt.hardware [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1293.226605] env[68285]: DEBUG nova.virt.hardware [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1293.226876] env[68285]: DEBUG nova.virt.hardware [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1293.227143] env[68285]: DEBUG nova.virt.hardware [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1293.228026] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726d3087-d771-412d-b249-7b837d5dd308 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.238410] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519041e3-9f96-419c-9f0a-811eea13662e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.376301] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.376606] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.376969] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task 
ComputeManager._instance_usage_audit {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.377105] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.377280] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.465493] env[68285]: DEBUG nova.compute.manager [req-02464656-f4bf-41cd-bd3b-2f78c2fd427c req-7ef2321a-c533-4db7-b667-36d1e1013073 service nova] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Received event network-vif-plugged-3bf95754-e92f-4854-b0aa-78333d1e73ba {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1293.465711] env[68285]: DEBUG oslo_concurrency.lockutils [req-02464656-f4bf-41cd-bd3b-2f78c2fd427c req-7ef2321a-c533-4db7-b667-36d1e1013073 service nova] Acquiring lock "0329a534-0ba1-48df-aa9a-01d50bafab05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1293.465915] env[68285]: DEBUG oslo_concurrency.lockutils [req-02464656-f4bf-41cd-bd3b-2f78c2fd427c req-7ef2321a-c533-4db7-b667-36d1e1013073 service nova] Lock "0329a534-0ba1-48df-aa9a-01d50bafab05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.466099] env[68285]: DEBUG oslo_concurrency.lockutils [req-02464656-f4bf-41cd-bd3b-2f78c2fd427c req-7ef2321a-c533-4db7-b667-36d1e1013073 service nova] Lock "0329a534-0ba1-48df-aa9a-01d50bafab05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.466269] env[68285]: DEBUG nova.compute.manager [req-02464656-f4bf-41cd-bd3b-2f78c2fd427c req-7ef2321a-c533-4db7-b667-36d1e1013073 service nova] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] No waiting events found dispatching network-vif-plugged-3bf95754-e92f-4854-b0aa-78333d1e73ba {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1293.466493] env[68285]: WARNING nova.compute.manager [req-02464656-f4bf-41cd-bd3b-2f78c2fd427c req-7ef2321a-c533-4db7-b667-36d1e1013073 service nova] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Received unexpected event network-vif-plugged-3bf95754-e92f-4854-b0aa-78333d1e73ba for instance with vm_state building and task_state spawning. [ 1293.467460] env[68285]: INFO nova.network.neutron [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Port 11b23fc2-86d2-4f1a-9430-8afb7438275a from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1293.467769] env[68285]: DEBUG nova.network.neutron [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Updating instance_info_cache with network_info: [{"id": "1f6dacae-76ce-408a-8e61-deddf144ba68", "address": "fa:16:3e:f2:71:1b", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f6dacae-76", "ovs_interfaceid": "1f6dacae-76ce-408a-8e61-deddf144ba68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.470807] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.300s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.471287] env[68285]: DEBUG nova.compute.manager [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1293.473980] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.309s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.474226] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.476227] env[68285]: DEBUG oslo_concurrency.lockutils [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.795s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.478083] env[68285]: INFO nova.compute.claims [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1293.500887] env[68285]: DEBUG oslo_concurrency.lockutils [req-4f5dca73-cf72-44eb-9b03-2c154013442a req-0ecded18-a79f-4a7a-96cf-2be581e6a66e service nova] Releasing lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1293.504971] env[68285]: INFO nova.scheduler.client.report [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Deleted allocations for instance d4818c98-8134-4426-bd35-b2339ed6abd4 [ 1293.722280] env[68285]: DEBUG oslo_vmware.api [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892488, 'name': PowerOffVM_Task, 'duration_secs': 0.181703} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.722571] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1293.722779] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1293.723080] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0188d144-ef3f-4fea-8f28-9f9be70c0a25 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.790939] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1293.791179] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1293.791367] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Deleting the datastore file [datastore1] 8917672f-3b0d-42a1-b8b1-94ac47ce941a {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1293.791618] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-254189a1-9daa-49d8-b57f-18dd062fdabf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.797914] env[68285]: DEBUG oslo_vmware.api [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1293.797914] env[68285]: value = "task-2892490" [ 1293.797914] env[68285]: _type = "Task" [ 1293.797914] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.806140] env[68285]: DEBUG oslo_vmware.api [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892490, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.815295] env[68285]: DEBUG nova.network.neutron [req-26bcc737-679c-417d-afe6-1e1888f3b353 req-2ff7195c-18fe-477f-93a3-009623caa94b service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Updated VIF entry in instance network info cache for port 07d808df-d1b1-42f4-8853-e537f5b160e0. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1293.815679] env[68285]: DEBUG nova.network.neutron [req-26bcc737-679c-417d-afe6-1e1888f3b353 req-2ff7195c-18fe-477f-93a3-009623caa94b service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Updating instance_info_cache with network_info: [{"id": "07d808df-d1b1-42f4-8853-e537f5b160e0", "address": "fa:16:3e:7e:68:6d", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07d808df-d1", "ovs_interfaceid": "07d808df-d1b1-42f4-8853-e537f5b160e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.882926] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1293.970297] env[68285]: DEBUG oslo_concurrency.lockutils [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "refresh_cache-8917672f-3b0d-42a1-b8b1-94ac47ce941a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1293.982450] env[68285]: DEBUG nova.compute.utils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1293.985875] env[68285]: DEBUG nova.compute.manager [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1293.986254] env[68285]: DEBUG nova.network.neutron [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1294.016177] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c1ad0474-b8ef-44d7-8794-2579e95ce88a tempest-DeleteServersTestJSON-1021365396 tempest-DeleteServersTestJSON-1021365396-project-member] Lock "d4818c98-8134-4426-bd35-b2339ed6abd4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.278s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.025574] env[68285]: DEBUG nova.policy [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fee422406a774be7830837baa9743f0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7106da1f6bcb4d0cb3dcad984b3adb33', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1294.308257] env[68285]: DEBUG nova.network.neutron [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Successfully updated port: 3bf95754-e92f-4854-b0aa-78333d1e73ba {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1294.313615] env[68285]: DEBUG oslo_vmware.api [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892490, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.317936] env[68285]: DEBUG oslo_concurrency.lockutils [req-26bcc737-679c-417d-afe6-1e1888f3b353 req-2ff7195c-18fe-477f-93a3-009623caa94b service nova] Releasing lock "refresh_cache-a1dc8c86-523f-4474-9fea-9ccf35a36b3f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1294.318202] env[68285]: DEBUG nova.compute.manager [req-26bcc737-679c-417d-afe6-1e1888f3b353 req-2ff7195c-18fe-477f-93a3-009623caa94b service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Received event network-vif-deleted-11b23fc2-86d2-4f1a-9430-8afb7438275a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1294.318382] env[68285]: INFO nova.compute.manager [req-26bcc737-679c-417d-afe6-1e1888f3b353 req-2ff7195c-18fe-477f-93a3-009623caa94b service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Neutron deleted interface 11b23fc2-86d2-4f1a-9430-8afb7438275a; detaching it from the instance and deleting it from the info cache [ 1294.318652] env[68285]: DEBUG nova.network.neutron [req-26bcc737-679c-417d-afe6-1e1888f3b353 req-2ff7195c-18fe-477f-93a3-009623caa94b service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Updating instance_info_cache with network_info: [{"id": "1f6dacae-76ce-408a-8e61-deddf144ba68", "address": "fa:16:3e:f2:71:1b", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f6dacae-76", "ovs_interfaceid": "1f6dacae-76ce-408a-8e61-deddf144ba68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.321813] env[68285]: DEBUG nova.network.neutron [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Successfully created port: 10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1294.474672] env[68285]: DEBUG oslo_concurrency.lockutils [None req-139dd796-0b91-4a1c-8a02-a58ef29183ec tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-8917672f-3b0d-42a1-b8b1-94ac47ce941a-11b23fc2-86d2-4f1a-9430-8afb7438275a" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.636s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.487823] env[68285]: DEBUG nova.compute.manager [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1294.695600] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fd725c-b7b8-4669-8e0a-2586119fcbf3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.703252] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c36eca0-bae9-47fd-ac7b-ee574162a39d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.733787] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d35dc33-40d9-437e-acaf-353f3bbf3649 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.740750] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bcc04e2-c69a-49f6-ad40-b24a246d1245 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.753651] env[68285]: DEBUG nova.compute.provider_tree [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1294.808800] env[68285]: DEBUG oslo_vmware.api [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892490, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.592542} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.809785] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "refresh_cache-0329a534-0ba1-48df-aa9a-01d50bafab05" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.809916] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "refresh_cache-0329a534-0ba1-48df-aa9a-01d50bafab05" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1294.810159] env[68285]: DEBUG nova.network.neutron [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1294.811113] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1294.811355] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1294.811603] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1294.811737] env[68285]: INFO nova.compute.manager [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1294.811909] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1294.812901] env[68285]: DEBUG nova.compute.manager [req-3b3286c9-5a75-4b21-9473-003ec987a1f4 req-1c4c423e-b811-41f6-9b1d-7af483a0dd2d service nova] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Received event network-changed-3bf95754-e92f-4854-b0aa-78333d1e73ba {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1294.814050] env[68285]: DEBUG nova.compute.manager [req-3b3286c9-5a75-4b21-9473-003ec987a1f4 req-1c4c423e-b811-41f6-9b1d-7af483a0dd2d service nova] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Refreshing instance network info cache due to event network-changed-3bf95754-e92f-4854-b0aa-78333d1e73ba. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1294.814050] env[68285]: DEBUG oslo_concurrency.lockutils [req-3b3286c9-5a75-4b21-9473-003ec987a1f4 req-1c4c423e-b811-41f6-9b1d-7af483a0dd2d service nova] Acquiring lock "refresh_cache-0329a534-0ba1-48df-aa9a-01d50bafab05" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.814050] env[68285]: DEBUG nova.compute.manager [-] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1294.814050] env[68285]: DEBUG nova.network.neutron [-] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1294.821189] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51b31bbe-9e1c-4bf3-84bb-a7463bf61195 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.832036] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8570707-6484-4fdf-80c4-2d3c028e79f0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.863521] env[68285]: DEBUG nova.compute.manager [req-26bcc737-679c-417d-afe6-1e1888f3b353 req-2ff7195c-18fe-477f-93a3-009623caa94b service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Detach interface failed, port_id=11b23fc2-86d2-4f1a-9430-8afb7438275a, reason: Instance 8917672f-3b0d-42a1-b8b1-94ac47ce941a could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1295.274438] env[68285]: ERROR nova.scheduler.client.report [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [req-f5efbcbb-617b-4c3e-8d69-4ef39b75d2c6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f5efbcbb-617b-4c3e-8d69-4ef39b75d2c6"}]} [ 1295.290834] env[68285]: DEBUG nova.scheduler.client.report [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1295.304789] env[68285]: DEBUG nova.scheduler.client.report [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1295.305088] env[68285]: DEBUG nova.compute.provider_tree [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1295.321094] env[68285]: DEBUG nova.scheduler.client.report [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1295.348449] env[68285]: DEBUG nova.network.neutron [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1295.352670] env[68285]: DEBUG nova.scheduler.client.report [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1295.490543] env[68285]: DEBUG nova.network.neutron [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Updating instance_info_cache with network_info: [{"id": "3bf95754-e92f-4854-b0aa-78333d1e73ba", "address": "fa:16:3e:25:6e:12", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bf95754-e9", "ovs_interfaceid": "3bf95754-e92f-4854-b0aa-78333d1e73ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.499606] env[68285]: DEBUG nova.compute.manager [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1295.525179] env[68285]: DEBUG nova.virt.hardware [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1295.525433] env[68285]: DEBUG nova.virt.hardware [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1295.525590] env[68285]: DEBUG nova.virt.hardware [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1295.525768] env[68285]: DEBUG nova.virt.hardware [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1295.525914] env[68285]: DEBUG nova.virt.hardware [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1295.526072] env[68285]: DEBUG nova.virt.hardware [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1295.526283] env[68285]: DEBUG nova.virt.hardware [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1295.526443] env[68285]: DEBUG nova.virt.hardware [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1295.526606] env[68285]: DEBUG 
nova.virt.hardware [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1295.526764] env[68285]: DEBUG nova.virt.hardware [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1295.526933] env[68285]: DEBUG nova.virt.hardware [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1295.528090] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9d14a8-a0bb-4bb7-bf18-ec7864bcdde5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.538285] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5c34f2-d628-40e5-bdbc-891362bfa3fe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.577887] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361cb142-5f41-424c-9a3e-2456f5afde35 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.585651] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d9ce7a-dc73-43ec-ab6e-fcb1cb74cc66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.617376] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76b2e4e-1659-4ea5-aa5c-934f6896ead9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.625296] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac41561c-2777-4cd8-a9fd-f9406543bd96 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.638595] env[68285]: DEBUG nova.compute.provider_tree [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1295.707727] env[68285]: DEBUG nova.network.neutron [-] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Updating instance_info_cache with network_info: [] {{(pid=68285) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.823816] env[68285]: DEBUG nova.network.neutron [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Successfully updated port: 10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1295.993065] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "refresh_cache-0329a534-0ba1-48df-aa9a-01d50bafab05" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1295.993334] env[68285]: DEBUG nova.compute.manager [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Instance network_info: |[{"id": "3bf95754-e92f-4854-b0aa-78333d1e73ba", "address": "fa:16:3e:25:6e:12", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bf95754-e9", "ovs_interfaceid": "3bf95754-e92f-4854-b0aa-78333d1e73ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1295.993670] env[68285]: DEBUG oslo_concurrency.lockutils [req-3b3286c9-5a75-4b21-9473-003ec987a1f4 req-1c4c423e-b811-41f6-9b1d-7af483a0dd2d service nova] Acquired lock "refresh_cache-0329a534-0ba1-48df-aa9a-01d50bafab05" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1295.993913] env[68285]: DEBUG nova.network.neutron [req-3b3286c9-5a75-4b21-9473-003ec987a1f4 req-1c4c423e-b811-41f6-9b1d-7af483a0dd2d service nova] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Refreshing network info cache for port 3bf95754-e92f-4854-b0aa-78333d1e73ba {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1295.995245] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:6e:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'3bf95754-e92f-4854-b0aa-78333d1e73ba', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1296.003357] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1296.007113] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1296.007642] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b45d3b4-f3f0-4ba9-8fda-dfb23224e4e4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.036632] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1296.036632] env[68285]: value = "task-2892492" [ 1296.036632] env[68285]: _type = "Task" [ 1296.036632] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.049442] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892492, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.170724] env[68285]: DEBUG nova.scheduler.client.report [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 166 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1296.170986] env[68285]: DEBUG nova.compute.provider_tree [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 166 to 167 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1296.171183] env[68285]: DEBUG nova.compute.provider_tree [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
1296.210036] env[68285]: INFO nova.compute.manager [-] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Took 1.40 seconds to deallocate network for instance. [ 1296.230094] env[68285]: DEBUG nova.network.neutron [req-3b3286c9-5a75-4b21-9473-003ec987a1f4 req-1c4c423e-b811-41f6-9b1d-7af483a0dd2d service nova] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Updated VIF entry in instance network info cache for port 3bf95754-e92f-4854-b0aa-78333d1e73ba. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1296.230094] env[68285]: DEBUG nova.network.neutron [req-3b3286c9-5a75-4b21-9473-003ec987a1f4 req-1c4c423e-b811-41f6-9b1d-7af483a0dd2d service nova] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Updating instance_info_cache with network_info: [{"id": "3bf95754-e92f-4854-b0aa-78333d1e73ba", "address": "fa:16:3e:25:6e:12", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bf95754-e9", "ovs_interfaceid": "3bf95754-e92f-4854-b0aa-78333d1e73ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.327058] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.327058] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.327627] env[68285]: DEBUG nova.network.neutron [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1296.546885] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892492, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.676650] env[68285]: DEBUG oslo_concurrency.lockutils [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.200s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.677057] env[68285]: DEBUG nova.compute.manager [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1296.680035] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.197s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.716155] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.732506] env[68285]: DEBUG oslo_concurrency.lockutils [req-3b3286c9-5a75-4b21-9473-003ec987a1f4 req-1c4c423e-b811-41f6-9b1d-7af483a0dd2d service nova] Releasing lock "refresh_cache-0329a534-0ba1-48df-aa9a-01d50bafab05" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1296.839995] env[68285]: DEBUG nova.compute.manager [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Received event network-vif-deleted-1f6dacae-76ce-408a-8e61-deddf144ba68 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1296.840247] env[68285]: DEBUG nova.compute.manager [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Received event network-vif-plugged-10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1296.840446] env[68285]: DEBUG oslo_concurrency.lockutils [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] Acquiring lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.840647] env[68285]: DEBUG oslo_concurrency.lockutils [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.840811] env[68285]: DEBUG oslo_concurrency.lockutils [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.840972] env[68285]: DEBUG nova.compute.manager [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] No waiting events found dispatching network-vif-plugged-10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1296.841158] env[68285]: WARNING nova.compute.manager [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Received unexpected event network-vif-plugged-10199287-9009-48cc-b97a-e94229f7d640 for instance with vm_state building and task_state spawning. [ 1296.841314] env[68285]: DEBUG nova.compute.manager [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Received event network-changed-10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1296.841467] env[68285]: DEBUG nova.compute.manager [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Refreshing instance network info cache due to event network-changed-10199287-9009-48cc-b97a-e94229f7d640. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1296.841628] env[68285]: DEBUG oslo_concurrency.lockutils [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] Acquiring lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.860412] env[68285]: DEBUG nova.network.neutron [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1296.989468] env[68285]: DEBUG nova.network.neutron [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Updating instance_info_cache with network_info: [{"id": "10199287-9009-48cc-b97a-e94229f7d640", "address": "fa:16:3e:46:49:64", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10199287-90", "ovs_interfaceid": "10199287-9009-48cc-b97a-e94229f7d640", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.046875] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892492, 'name': CreateVM_Task, 'duration_secs': 0.51327} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.047066] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1297.047785] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.047901] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1297.048240] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1297.048491] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-707a32b9-10b5-4bc2-a8ed-8f01e60a2031 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.053597] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1297.053597] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d86118-8eff-f9b5-e621-3aad389331aa" [ 1297.053597] env[68285]: _type = "Task" [ 1297.053597] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.061090] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d86118-8eff-f9b5-e621-3aad389331aa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.183221] env[68285]: DEBUG nova.compute.utils [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1297.186416] env[68285]: INFO nova.compute.claims [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1297.189880] env[68285]: DEBUG nova.compute.manager [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1297.190052] env[68285]: DEBUG nova.network.neutron [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1297.240707] env[68285]: DEBUG nova.policy [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef9d4ec8f82d4c938f710cc9cfefd58f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5f627acd23dc4889a2d0ca8dd1ea865a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1297.495285] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.495548] env[68285]: DEBUG nova.compute.manager [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Instance network_info: |[{"id": "10199287-9009-48cc-b97a-e94229f7d640", "address": "fa:16:3e:46:49:64", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10199287-90", "ovs_interfaceid": "10199287-9009-48cc-b97a-e94229f7d640", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1297.495752] env[68285]: DEBUG oslo_concurrency.lockutils [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] Acquired lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1297.495927] env[68285]: DEBUG nova.network.neutron [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Refreshing network info cache for port 10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1297.497163] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:49:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10199287-9009-48cc-b97a-e94229f7d640', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1297.508121] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1297.508730] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1297.510845] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c20d356f-19ed-4bca-817b-e2758f3b047c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.534409] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1297.534409] env[68285]: value = "task-2892493" [ 1297.534409] env[68285]: _type = "Task" [ 1297.534409] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.543243] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892493, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.564282] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d86118-8eff-f9b5-e621-3aad389331aa, 'name': SearchDatastore_Task, 'duration_secs': 0.010552} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.564347] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.564597] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1297.564838] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.564987] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1297.565183] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1297.565449] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06398375-73e4-4ba4-a22b-4edec8e7945f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.576376] env[68285]: DEBUG nova.network.neutron [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Successfully created port: d215cd64-22e2-46be-88b2-f3185156486b {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1297.578557] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1297.579110] env[68285]: DEBUG 
nova.virt.vmwareapi.vmops [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1297.579806] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28cb6b91-8e93-4935-a62f-12572a49a355 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.585756] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1297.585756] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f89994-df29-7343-91da-2933a2ae5de0" [ 1297.585756] env[68285]: _type = "Task" [ 1297.585756] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.594789] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f89994-df29-7343-91da-2933a2ae5de0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.690648] env[68285]: DEBUG nova.compute.manager [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1297.695022] env[68285]: INFO nova.compute.resource_tracker [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating resource usage from migration dd2dc971-bb14-469f-bd32-e05d3aade332 [ 1297.880378] env[68285]: DEBUG nova.network.neutron [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Updated VIF entry in instance network info cache for port 10199287-9009-48cc-b97a-e94229f7d640. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1297.880724] env[68285]: DEBUG nova.network.neutron [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Updating instance_info_cache with network_info: [{"id": "10199287-9009-48cc-b97a-e94229f7d640", "address": "fa:16:3e:46:49:64", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10199287-90", "ovs_interfaceid": "10199287-9009-48cc-b97a-e94229f7d640", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.049148] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892493, 'name': CreateVM_Task, 'duration_secs': 0.323312} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.051515] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1298.052361] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.052905] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1298.052905] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1298.053165] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8135e3de-2348-482a-a146-23639e8d4325 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.057722] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1298.057722] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52288d67-e2a8-a66d-9143-3dffa809d664" [ 1298.057722] env[68285]: _type = "Task" [ 1298.057722] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.063113] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e0dc8e-69a7-4c51-b52a-ffff9b8fbea1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.068722] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52288d67-e2a8-a66d-9143-3dffa809d664, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.073156] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a44a89-8286-4770-9512-3c1948e2af1d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.109941] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5941917c-f48c-4ad8-9f4b-b3f47c13d090 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.120262] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f89994-df29-7343-91da-2933a2ae5de0, 'name': SearchDatastore_Task, 'duration_secs': 0.013975} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.122544] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f815dee5-1c7b-4ca9-a88a-ad42b3a369ca {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.126817] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-332bc814-ae2b-44a3-9877-fe48a110aa55 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.133517] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1298.133517] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521ece47-a5d6-4243-5b67-1a8634955fc1" [ 1298.133517] env[68285]: _type = "Task" [ 1298.133517] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.141633] env[68285]: DEBUG nova.compute.provider_tree [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1298.152631] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]521ece47-a5d6-4243-5b67-1a8634955fc1, 'name': SearchDatastore_Task, 'duration_secs': 0.009852} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.153533] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1298.153788] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 0329a534-0ba1-48df-aa9a-01d50bafab05/0329a534-0ba1-48df-aa9a-01d50bafab05.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1298.154061] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05369aa2-994f-4d28-92ec-c2af52d34519 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.162454] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1298.162454] env[68285]: value = "task-2892494" [ 1298.162454] env[68285]: _type = "Task" [ 1298.162454] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.170810] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892494, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.200584] env[68285]: INFO nova.virt.block_device [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Booting with volume 256c2839-790d-4956-aefd-ad8ce558c59d at /dev/sda [ 1298.242472] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84357166-0a3d-41ae-b401-14806085e7a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.252171] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b1cfa3-e0ae-4ffd-94ce-27769c2f34f2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.287434] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c67e432a-8825-4f05-9a5e-8e6e9c1af341 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.298331] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f05cf62-9ff0-4272-910d-2065d9809ae8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.339277] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e966b9b7-a13a-4bd9-8ecd-9235806fe2c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.346710] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac40c2f-c913-4893-826e-ff89c0d02a21 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.362038] env[68285]: DEBUG nova.virt.block_device [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Updating existing volume attachment record: 9882ab55-5e51-432d-bba5-4b0cc1f6be1f {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1298.384812] env[68285]: DEBUG oslo_concurrency.lockutils [req-94bb8726-1450-4f93-9a7c-99701af24e68 req-0a51fee0-4724-4ac4-aded-f144d1236a88 service nova] Releasing lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1298.570356] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52288d67-e2a8-a66d-9143-3dffa809d664, 'name': SearchDatastore_Task, 'duration_secs': 0.009662} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.570719] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1298.570955] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1298.571211] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.571389] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1298.571583] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1298.571852] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9de6c7cc-81e9-4865-bbaf-0f3542729ce9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.587244] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1298.587482] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1298.588407] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b34c4987-8446-4d98-94fc-65fb788935f2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.595911] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1298.595911] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52961b47-72e1-9444-0422-ffc524ef4f35" [ 1298.595911] env[68285]: _type = "Task" [ 1298.595911] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.605347] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52961b47-72e1-9444-0422-ffc524ef4f35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.648885] env[68285]: DEBUG nova.scheduler.client.report [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1298.672940] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892494, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506638} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.673286] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 0329a534-0ba1-48df-aa9a-01d50bafab05/0329a534-0ba1-48df-aa9a-01d50bafab05.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1298.673674] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1298.674262] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8cae798c-89f6-4c81-8ad3-5fe1b5990b4f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.680679] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1298.680679] env[68285]: value = "task-2892495" [ 1298.680679] env[68285]: _type = "Task" [ 1298.680679] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.689517] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892495, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.103029] env[68285]: DEBUG nova.compute.manager [req-fb6a0ce9-eb7c-4fd6-86a1-b80ecc641166 req-2903aa0e-8660-4dcb-98c4-11d70d4e0af4 service nova] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Received event network-vif-plugged-d215cd64-22e2-46be-88b2-f3185156486b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1299.103486] env[68285]: DEBUG oslo_concurrency.lockutils [req-fb6a0ce9-eb7c-4fd6-86a1-b80ecc641166 req-2903aa0e-8660-4dcb-98c4-11d70d4e0af4 service nova] Acquiring lock "8a598506-724f-48f6-91a8-1e02483e6aab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1299.103868] env[68285]: DEBUG oslo_concurrency.lockutils [req-fb6a0ce9-eb7c-4fd6-86a1-b80ecc641166 req-2903aa0e-8660-4dcb-98c4-11d70d4e0af4 service nova] Lock "8a598506-724f-48f6-91a8-1e02483e6aab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.105451] env[68285]: DEBUG oslo_concurrency.lockutils [req-fb6a0ce9-eb7c-4fd6-86a1-b80ecc641166 req-2903aa0e-8660-4dcb-98c4-11d70d4e0af4 service nova] Lock "8a598506-724f-48f6-91a8-1e02483e6aab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.105451] env[68285]: DEBUG nova.compute.manager [req-fb6a0ce9-eb7c-4fd6-86a1-b80ecc641166 req-2903aa0e-8660-4dcb-98c4-11d70d4e0af4 service nova] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] No waiting events found dispatching network-vif-plugged-d215cd64-22e2-46be-88b2-f3185156486b {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1299.105451] env[68285]: WARNING nova.compute.manager [req-fb6a0ce9-eb7c-4fd6-86a1-b80ecc641166 req-2903aa0e-8660-4dcb-98c4-11d70d4e0af4 service nova] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Received unexpected event network-vif-plugged-d215cd64-22e2-46be-88b2-f3185156486b for instance with vm_state building and task_state block_device_mapping. [ 1299.112689] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52961b47-72e1-9444-0422-ffc524ef4f35, 'name': SearchDatastore_Task, 'duration_secs': 0.052138} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.115149] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10ab317c-d7cb-46aa-9dfa-4dd6cd8eea3f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.122246] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1299.122246] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52611998-b0fe-d5f9-9246-4cf317e713cd" [ 1299.122246] env[68285]: _type = "Task" [ 1299.122246] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.131018] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52611998-b0fe-d5f9-9246-4cf317e713cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.157019] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.475s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.157019] env[68285]: INFO nova.compute.manager [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Migrating [ 1299.162398] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.279s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.162826] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.163139] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68285) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1299.163593] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.448s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.165104] env[68285]: DEBUG nova.objects.instance 
[None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'resources' on Instance uuid 8917672f-3b0d-42a1-b8b1-94ac47ce941a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1299.168874] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13d3698-f192-4783-85ae-7bf16af7b6df {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.191963] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022c6d72-a342-4662-8d43-dc40dc3b679a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.201581] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892495, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076919} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.202542] env[68285]: DEBUG nova.network.neutron [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Successfully updated port: d215cd64-22e2-46be-88b2-f3185156486b {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1299.211749] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1299.213909] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33df9a31-f13b-47d8-9731-7eb0502a6323 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.216950] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ffc0a3-9654-490b-bff5-0843e38917de {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.241287] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 0329a534-0ba1-48df-aa9a-01d50bafab05/0329a534-0ba1-48df-aa9a-01d50bafab05.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1299.243742] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67ddadd9-37ee-44c7-a7b4-a7b403ac4868 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.259548] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8435083-6be3-49e0-8c71-a1c2adf3962a {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.290584] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179066MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=68285) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1299.290584] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1299.291083] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1299.291083] env[68285]: value = "task-2892496" [ 1299.291083] env[68285]: _type = "Task" [ 1299.291083] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.632794] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52611998-b0fe-d5f9-9246-4cf317e713cd, 'name': SearchDatastore_Task, 'duration_secs': 0.010198} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.633109] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1299.633316] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] e3117ede-5d88-4e47-a32f-ea91b1ba83ec/e3117ede-5d88-4e47-a32f-ea91b1ba83ec.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1299.633633] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e0f087d-93eb-4bfa-ad2d-c44f7f948ab3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.640282] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1299.640282] env[68285]: value = "task-2892497" [ 1299.640282] env[68285]: _type = "Task" [ 1299.640282] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.648061] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892497, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.680082] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1299.680082] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1299.680306] env[68285]: DEBUG nova.network.neutron [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1299.714954] env[68285]: DEBUG oslo_concurrency.lockutils [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Acquiring lock "refresh_cache-8a598506-724f-48f6-91a8-1e02483e6aab" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1299.714954] env[68285]: DEBUG oslo_concurrency.lockutils [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Acquired lock "refresh_cache-8a598506-724f-48f6-91a8-1e02483e6aab" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1299.715111] env[68285]: DEBUG nova.network.neutron [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1299.802481] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892496, 'name': ReconfigVM_Task, 'duration_secs': 0.317673} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.802789] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 0329a534-0ba1-48df-aa9a-01d50bafab05/0329a534-0ba1-48df-aa9a-01d50bafab05.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1299.803452] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9aed69df-6759-401e-9bed-404e27907281 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.810308] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1299.810308] env[68285]: value = "task-2892498" [ 1299.810308] env[68285]: _type = "Task" [ 1299.810308] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.818467] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892498, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.897998] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3c3ab4-afc1-4009-afa2-40861a28baa2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.906090] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce33369-5b51-4679-b841-60a6917ac5a3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.940985] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4176ab-a394-4dde-ab0a-410eff951e23 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.949713] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda67692-77d2-4e8f-8090-ba234e00f5fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.967126] env[68285]: DEBUG nova.compute.provider_tree [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1300.149711] env[68285]: DEBUG oslo_vmware.api [None 
req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892497, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.279828] env[68285]: DEBUG nova.network.neutron [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1300.319847] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892498, 'name': Rename_Task, 'duration_secs': 0.149916} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.320148] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1300.320370] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e78ec6f-d46f-4b85-a547-fd4b88101b8c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.326792] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1300.326792] env[68285]: value = "task-2892499" [ 1300.326792] env[68285]: _type = "Task" [ 1300.326792] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.334310] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892499, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.469116] env[68285]: DEBUG nova.compute.manager [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1300.469648] env[68285]: DEBUG nova.virt.hardware [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1300.469861] env[68285]: DEBUG nova.virt.hardware [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1300.470087] env[68285]: DEBUG nova.virt.hardware [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1300.470436] env[68285]: DEBUG nova.virt.hardware [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1300.470614] env[68285]: DEBUG nova.virt.hardware [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1300.470772] env[68285]: DEBUG nova.virt.hardware [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1300.470994] env[68285]: DEBUG nova.virt.hardware [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1300.471184] env[68285]: DEBUG nova.virt.hardware [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1300.471360] env[68285]: DEBUG nova.virt.hardware [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 
tempest-ServerActionsV293TestJSON-2062219040-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1300.471527] env[68285]: DEBUG nova.virt.hardware [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1300.471703] env[68285]: DEBUG nova.virt.hardware [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1300.475597] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef43f122-6b43-4107-ac76-b123e9174cd1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.484903] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3b040c-6b9c-4210-ace3-dfbed8814f32 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.509798] env[68285]: DEBUG nova.scheduler.client.report [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 167 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1300.510168] env[68285]: DEBUG nova.compute.provider_tree [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 167 to 168 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1300.510387] env[68285]: DEBUG nova.compute.provider_tree [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1300.574172] env[68285]: DEBUG nova.network.neutron [None req-533b0a0c-4f80-4789-af65-fa844a077548 
tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Updating instance_info_cache with network_info: [{"id": "d215cd64-22e2-46be-88b2-f3185156486b", "address": "fa:16:3e:8f:74:88", "network": {"id": "c9e39941-7f0e-4dda-8088-ed1daee93ef3", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-530210684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f627acd23dc4889a2d0ca8dd1ea865a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd215cd64-22", "ovs_interfaceid": "d215cd64-22e2-46be-88b2-f3185156486b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.628677] env[68285]: DEBUG nova.network.neutron [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance_info_cache with network_info: [{"id": "c311cfc8-4f78-4068-8841-8aa0ce5243c2", "address": "fa:16:3e:12:c5:35", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc311cfc8-4f", "ovs_interfaceid": "c311cfc8-4f78-4068-8841-8aa0ce5243c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.654157] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892497, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617528} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.654157] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] e3117ede-5d88-4e47-a32f-ea91b1ba83ec/e3117ede-5d88-4e47-a32f-ea91b1ba83ec.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1300.654157] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1300.654690] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-77bae6e8-3082-4984-a2e0-d24bdbf31fed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.661112] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1300.661112] env[68285]: value = "task-2892500" [ 1300.661112] env[68285]: _type = "Task" [ 1300.661112] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.671150] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892500, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.837722] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892499, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.015589] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1301.017846] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.728s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1301.044066] env[68285]: INFO nova.scheduler.client.report [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Deleted allocations for instance 8917672f-3b0d-42a1-b8b1-94ac47ce941a [ 1301.076945] env[68285]: DEBUG oslo_concurrency.lockutils [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Releasing lock "refresh_cache-8a598506-724f-48f6-91a8-1e02483e6aab" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1301.078301] env[68285]: DEBUG nova.compute.manager [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Instance network_info: |[{"id": "d215cd64-22e2-46be-88b2-f3185156486b", "address": "fa:16:3e:8f:74:88", "network": {"id": "c9e39941-7f0e-4dda-8088-ed1daee93ef3", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-530210684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f627acd23dc4889a2d0ca8dd1ea865a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd215cd64-22", "ovs_interfaceid": "d215cd64-22e2-46be-88b2-f3185156486b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1301.078301] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:74:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd215cd64-22e2-46be-88b2-f3185156486b', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1301.085259] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Creating folder: Project (5f627acd23dc4889a2d0ca8dd1ea865a). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1301.085579] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04a7d7fc-fee0-41dd-b0db-e103a2dcdd1b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.100555] env[68285]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1301.100709] env[68285]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68285) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1301.101048] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Folder already exists: Project (5f627acd23dc4889a2d0ca8dd1ea865a). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1301.101250] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Creating folder: Instances. Parent ref: group-v581070. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1301.101481] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88bb76b9-f153-4005-8dbf-ad02ede2ad50 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.112225] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Created folder: Instances in parent group-v581070. [ 1301.112484] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1301.112671] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1301.112875] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b87018ab-4db1-4285-9a55-a67da4b3d85b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.131265] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1301.134514] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1301.134514] env[68285]: value = "task-2892503" [ 1301.134514] env[68285]: _type = "Task" [ 1301.134514] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.140145] env[68285]: DEBUG nova.compute.manager [req-adcbd13f-a6f2-44a3-a484-a0bbf904f46b req-c33b14cf-b300-410a-9ec7-000f5fbd7b4c service nova] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Received event network-changed-d215cd64-22e2-46be-88b2-f3185156486b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1301.140340] env[68285]: DEBUG nova.compute.manager [req-adcbd13f-a6f2-44a3-a484-a0bbf904f46b req-c33b14cf-b300-410a-9ec7-000f5fbd7b4c service nova] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Refreshing instance network info cache due to event network-changed-d215cd64-22e2-46be-88b2-f3185156486b. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1301.141028] env[68285]: DEBUG oslo_concurrency.lockutils [req-adcbd13f-a6f2-44a3-a484-a0bbf904f46b req-c33b14cf-b300-410a-9ec7-000f5fbd7b4c service nova] Acquiring lock "refresh_cache-8a598506-724f-48f6-91a8-1e02483e6aab" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.141028] env[68285]: DEBUG oslo_concurrency.lockutils [req-adcbd13f-a6f2-44a3-a484-a0bbf904f46b req-c33b14cf-b300-410a-9ec7-000f5fbd7b4c service nova] Acquired lock "refresh_cache-8a598506-724f-48f6-91a8-1e02483e6aab" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1301.141028] env[68285]: DEBUG nova.network.neutron [req-adcbd13f-a6f2-44a3-a484-a0bbf904f46b req-c33b14cf-b300-410a-9ec7-000f5fbd7b4c service nova] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Refreshing network info cache for port d215cd64-22e2-46be-88b2-f3185156486b {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1301.147453] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892503, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.170315] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892500, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062541} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.170586] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1301.171377] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092ad052-655f-4f18-9050-e30881904de2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.194661] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] e3117ede-5d88-4e47-a32f-ea91b1ba83ec/e3117ede-5d88-4e47-a32f-ea91b1ba83ec.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1301.195013] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2364ed22-6956-4393-8339-e692ac2c29ef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.216674] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1301.216674] env[68285]: value = "task-2892504" [ 1301.216674] env[68285]: _type = "Task" [ 1301.216674] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.226397] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892504, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.339422] env[68285]: DEBUG oslo_vmware.api [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892499, 'name': PowerOnVM_Task, 'duration_secs': 0.519164} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.339721] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1301.339956] env[68285]: INFO nova.compute.manager [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Took 8.15 seconds to spawn the instance on the hypervisor. [ 1301.340156] env[68285]: DEBUG nova.compute.manager [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1301.340936] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75494726-e5a5-423f-af38-45fbda88d354 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.554540] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e6c1b43e-dd30-4491-986a-14c6615a5b1f tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "8917672f-3b0d-42a1-b8b1-94ac47ce941a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.883s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1301.649275] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892503, 'name': CreateVM_Task, 'duration_secs': 0.38157} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.649389] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1301.650024] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'boot_index': 0, 'disk_bus': None, 'guest_format': None, 'device_type': None, 'attachment_id': '9882ab55-5e51-432d-bba5-4b0cc1f6be1f', 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581076', 'volume_id': '256c2839-790d-4956-aefd-ad8ce558c59d', 'name': 'volume-256c2839-790d-4956-aefd-ad8ce558c59d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8a598506-724f-48f6-91a8-1e02483e6aab', 'attached_at': '', 'detached_at': '', 'volume_id': '256c2839-790d-4956-aefd-ad8ce558c59d', 'serial': '256c2839-790d-4956-aefd-ad8ce558c59d'}, 'volume_type': None}], 'swap': None} {{(pid=68285) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1301.650311] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Root volume attach. Driver type: vmdk {{(pid=68285) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1301.650980] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6a12a2-bdc4-4e88-95d0-6f0013eea04e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.658507] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7cc219-dfa3-44b9-8a2a-fa4da46f754c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.666189] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4526d768-d7e4-42fa-8671-cec072276fb6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.672797] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-2f3ed03a-f34f-4273-aef8-942a0b286b3c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.682106] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Waiting for the task: (returnval){ [ 1301.682106] env[68285]: value = "task-2892505" [ 1301.682106] env[68285]: _type = "Task" [ 1301.682106] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.691125] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892505, 'name': RelocateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.726368] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892504, 'name': ReconfigVM_Task, 'duration_secs': 0.393896} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.728675] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Reconfigured VM instance instance-00000072 to attach disk [datastore1] e3117ede-5d88-4e47-a32f-ea91b1ba83ec/e3117ede-5d88-4e47-a32f-ea91b1ba83ec.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1301.729623] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82cc1817-b550-48e9-9b89-ab3cb982d89b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.736023] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1301.736023] env[68285]: value = "task-2892506" [ 1301.736023] env[68285]: _type = "Task" [ 1301.736023] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.745327] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892506, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.855122] env[68285]: DEBUG nova.network.neutron [req-adcbd13f-a6f2-44a3-a484-a0bbf904f46b req-c33b14cf-b300-410a-9ec7-000f5fbd7b4c service nova] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Updated VIF entry in instance network info cache for port d215cd64-22e2-46be-88b2-f3185156486b. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1301.855490] env[68285]: DEBUG nova.network.neutron [req-adcbd13f-a6f2-44a3-a484-a0bbf904f46b req-c33b14cf-b300-410a-9ec7-000f5fbd7b4c service nova] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Updating instance_info_cache with network_info: [{"id": "d215cd64-22e2-46be-88b2-f3185156486b", "address": "fa:16:3e:8f:74:88", "network": {"id": "c9e39941-7f0e-4dda-8088-ed1daee93ef3", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-530210684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f627acd23dc4889a2d0ca8dd1ea865a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd215cd64-22", "ovs_interfaceid": "d215cd64-22e2-46be-88b2-f3185156486b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.862224] env[68285]: INFO nova.compute.manager [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Took 13.08 seconds to build instance. [ 1302.030105] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Applying migration context for instance faf810ae-7823-4115-a709-99dc7c480867 as it has an incoming, in-progress migration dd2dc971-bb14-469f-bd32-e05d3aade332. Migration status is migrating {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1302.031890] env[68285]: INFO nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating resource usage from migration dd2dc971-bb14-469f-bd32-e05d3aade332 [ 1302.066187] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d0f6ab86-e18d-42ac-bcf3-94eafb1939ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.066187] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 3858399e-9fc4-4d60-a9d5-95caefb7bd87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.066187] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d1446290-95ce-4e87-85df-7cc69bb57ce7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.066187] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 801f524e-28b5-4452-b880-0fc30d3c5eef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.066187] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance feda1a98-3086-43a6-a887-f4d1602ca8ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.066187] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance b2199b56-64bd-4096-b877-e10656b09313 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.066187] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 852ab501-00a6-442b-804a-1bbf49a2be8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.066187] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.066187] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 0d99fb99-977e-4edc-93d8-492d55fd68a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.066187] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance a1dc8c86-523f-4474-9fea-9ccf35a36b3f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.066187] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 0329a534-0ba1-48df-aa9a-01d50bafab05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.066187] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance e3117ede-5d88-4e47-a32f-ea91b1ba83ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.066187] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 8a598506-724f-48f6-91a8-1e02483e6aab actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.066187] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Migration dd2dc971-bb14-469f-bd32-e05d3aade332 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1302.067925] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance faf810ae-7823-4115-a709-99dc7c480867 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1302.067925] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1302.067925] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3456MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1302.200714] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892505, 'name': RelocateVM_Task} progress is 42%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.253362] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892506, 'name': Rename_Task, 'duration_secs': 0.145925} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.254048] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1302.254545] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c45736e-11b5-42cb-a433-07e8c60de2e0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.272488] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1302.272488] env[68285]: value = "task-2892507" [ 1302.272488] env[68285]: _type = "Task" [ 1302.272488] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.287936] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892507, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.349766] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d38c58-8c8a-49f6-8830-2a721234afa5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.358751] env[68285]: DEBUG oslo_concurrency.lockutils [req-adcbd13f-a6f2-44a3-a484-a0bbf904f46b req-c33b14cf-b300-410a-9ec7-000f5fbd7b4c service nova] Releasing lock "refresh_cache-8a598506-724f-48f6-91a8-1e02483e6aab" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1302.363433] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348d986a-f172-4ee0-87d4-0152e3a64eaa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.367677] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d02478fd-81dc-445f-b188-2f656c01f131 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "0329a534-0ba1-48df-aa9a-01d50bafab05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.603s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1302.412724] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e7b0dc-8d2f-460e-b363-82e4b77cef15 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.427033] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87dd6892-4980-4bb3-b04f-cdbcc4f84d8a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.447068] env[68285]: DEBUG nova.compute.provider_tree [None req-06584ce2-d677-4e40-a33d-d1815d788d27 
None None] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1302.652174] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f283134e-cdad-4eb2-b88a-ec01b6e5198f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.675910] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance 'faf810ae-7823-4115-a709-99dc7c480867' progress to 0 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1302.696280] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892505, 'name': RelocateVM_Task} progress is 54%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.788055] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892507, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.952787] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1303.073168] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "0329a534-0ba1-48df-aa9a-01d50bafab05" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.074939] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "0329a534-0ba1-48df-aa9a-01d50bafab05" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.074939] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "0329a534-0ba1-48df-aa9a-01d50bafab05-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.074939] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "0329a534-0ba1-48df-aa9a-01d50bafab05-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.074939] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "0329a534-0ba1-48df-aa9a-01d50bafab05-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1303.077469] env[68285]: INFO nova.compute.manager [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Terminating instance [ 1303.184368] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1303.186536] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28783d71-dec5-4418-8724-b683d333fdfa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.203916] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892505, 'name': RelocateVM_Task} progress is 67%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.205819] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1303.205819] env[68285]: value = "task-2892508" [ 1303.205819] env[68285]: _type = "Task" [ 1303.205819] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.219521] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892508, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.288110] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892507, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.460139] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1303.460433] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.443s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1303.460677] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1303.460817] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Cleaning up deleted instances {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1303.587246] env[68285]: DEBUG nova.compute.manager [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1303.587246] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1303.589841] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bac9a5e-7066-48d5-82ae-dddb96e96f9b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.605665] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1303.606220] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-515fcea4-1e33-4b78-bf09-5817dabcd49c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.619151] env[68285]: DEBUG oslo_vmware.api [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1303.619151] env[68285]: value = "task-2892509" [ 1303.619151] env[68285]: _type = "Task" [ 1303.619151] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.635714] env[68285]: DEBUG oslo_vmware.api [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892509, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.696109] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.697099] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.709360] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892505, 'name': RelocateVM_Task} progress is 81%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.730209] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892508, 'name': PowerOffVM_Task, 'duration_secs': 0.28842} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.731333] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1303.731627] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance 'faf810ae-7823-4115-a709-99dc7c480867' progress to 17 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1303.787668] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892507, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.877354] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "c341075b-9d30-45db-9d83-f196bf90ecd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.877571] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "c341075b-9d30-45db-9d83-f196bf90ecd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.987094] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] There are 49 instances to clean {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1303.987424] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: bd3c9b84-794d-4302-bfb2-1181d5ad9552] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1304.133600] env[68285]: DEBUG oslo_vmware.api [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892509, 'name': PowerOffVM_Task, 'duration_secs': 0.375308} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.133896] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1304.134090] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1304.134375] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d65adb4b-ad84-4d68-8a04-5366539ae4fe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.197411] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892505, 'name': RelocateVM_Task} progress is 95%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.199974] env[68285]: DEBUG nova.compute.manager [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1304.216151] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1304.216387] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1304.216572] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleting the datastore file [datastore1] 0329a534-0ba1-48df-aa9a-01d50bafab05 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1304.216846] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-52d1fa07-39ad-4dd4-a9c6-12f356ac033c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.223599] env[68285]: DEBUG oslo_vmware.api [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1304.223599] env[68285]: value = "task-2892511" [ 1304.223599] env[68285]: _type = "Task" [ 1304.223599] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.233380] env[68285]: DEBUG oslo_vmware.api [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892511, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.241035] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1304.241035] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1304.241291] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1304.241373] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1304.241515] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1304.241669] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1304.241910] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1304.242029] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1304.242280] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Got 1 possible topologies 
{{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1304.242507] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1304.242733] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1304.247922] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1f2d523-7e3e-444e-9346-afa7e4985401 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.264334] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1304.264334] env[68285]: value = "task-2892512" [ 1304.264334] env[68285]: _type = "Task" [ 1304.264334] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.272523] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892512, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.284167] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892507, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.381237] env[68285]: DEBUG nova.compute.manager [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1304.490767] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: a4fc942a-03e7-4415-bd95-f1f0e1344a69] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1304.698151] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892505, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.721197] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.721476] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.723245] env[68285]: INFO nova.compute.claims [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1304.734208] env[68285]: DEBUG oslo_vmware.api [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892511, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23567} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.734309] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1304.734487] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1304.734707] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1304.734883] env[68285]: INFO nova.compute.manager [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1304.735130] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1304.735867] env[68285]: DEBUG nova.compute.manager [-] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1304.735867] env[68285]: DEBUG nova.network.neutron [-] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1304.774700] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892512, 'name': ReconfigVM_Task, 'duration_secs': 0.139278} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.775077] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance 'faf810ae-7823-4115-a709-99dc7c480867' progress to 33 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1304.785783] env[68285]: DEBUG oslo_vmware.api [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892507, 'name': PowerOnVM_Task, 'duration_secs': 2.331145} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.786036] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1304.786235] env[68285]: INFO nova.compute.manager [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Took 9.29 seconds to spawn the instance on the hypervisor. 
[ 1304.786417] env[68285]: DEBUG nova.compute.manager [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1304.787144] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88922516-9293-429f-9a2f-6c3b82d2395f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.900247] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.980843] env[68285]: DEBUG nova.compute.manager [req-280e3ed7-c6a2-4f3a-9043-a5e9def6740a req-5a2029ea-4ec5-4026-acb4-2d1a3a2e1637 service nova] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Received event network-vif-deleted-3bf95754-e92f-4854-b0aa-78333d1e73ba {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1304.980972] env[68285]: INFO nova.compute.manager [req-280e3ed7-c6a2-4f3a-9043-a5e9def6740a req-5a2029ea-4ec5-4026-acb4-2d1a3a2e1637 service nova] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Neutron deleted interface 3bf95754-e92f-4854-b0aa-78333d1e73ba; detaching it from the instance and deleting it from the info cache [ 1304.981149] env[68285]: DEBUG nova.network.neutron [req-280e3ed7-c6a2-4f3a-9043-a5e9def6740a req-5a2029ea-4ec5-4026-acb4-2d1a3a2e1637 service nova] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.994418] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 5c0a91a4-b247-4950-8c7c-c62afdc4860f] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1305.199489] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892505, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.283866] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1305.284121] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1305.284279] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1305.284458] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1305.284601] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1305.284742] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1305.284938] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1305.285108] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1305.285283] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Got 1 possible topologies 
{{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1305.285435] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1305.285605] env[68285]: DEBUG nova.virt.hardware [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1305.290882] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Reconfiguring VM instance instance-0000006e to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1305.291171] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4fbddd63-4cbf-41a8-9d63-ee87ff5fd028 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.310801] env[68285]: INFO nova.compute.manager [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Took 16.01 seconds to build instance. [ 1305.313670] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1305.313670] env[68285]: value = "task-2892513" [ 1305.313670] env[68285]: _type = "Task" [ 1305.313670] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.321861] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892513, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.465534] env[68285]: DEBUG nova.network.neutron [-] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.483545] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a6aef099-e0c5-43c3-bd6b-68c7955165c1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.492878] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60438e12-c792-4104-8347-859ace2bd098 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.504215] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: d4818c98-8134-4426-bd35-b2339ed6abd4] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1305.532048] env[68285]: DEBUG nova.compute.manager [req-280e3ed7-c6a2-4f3a-9043-a5e9def6740a req-5a2029ea-4ec5-4026-acb4-2d1a3a2e1637 service nova] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Detach interface failed, port_id=3bf95754-e92f-4854-b0aa-78333d1e73ba, reason: Instance 0329a534-0ba1-48df-aa9a-01d50bafab05 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1305.700941] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892505, 'name': RelocateVM_Task} progress is 98%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.813294] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f3e7320c-6e72-4dd9-8276-bb5a8e5fc76a tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.524s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1305.825599] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892513, 'name': ReconfigVM_Task, 'duration_secs': 0.175153} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.825599] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Reconfigured VM instance instance-0000006e to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1305.826286] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad4af87-fb30-4077-8d01-140b7e725f19 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.850717] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] volume-bb993b4e-ec19-499c-a196-764a30b67abe/volume-bb993b4e-ec19-499c-a196-764a30b67abe.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1305.853137] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85f3cb0f-8d54-469c-954e-809c5347423b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.870669] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1305.870669] env[68285]: value = "task-2892514" [ 1305.870669] env[68285]: _type = "Task" [ 1305.870669] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.880207] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892514, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.968061] env[68285]: INFO nova.compute.manager [-] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Took 1.23 seconds to deallocate network for instance. 
[ 1305.974251] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7a40b3-4c2c-465b-8176-41cc41d64244 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.982306] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f824d3-be46-43d7-b5c7-baccf7c744a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.014198] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 8917672f-3b0d-42a1-b8b1-94ac47ce941a] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1306.017558] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952725a5-ad9b-4d5e-8f28-b0dbde108785 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.026715] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7364c5-485f-4264-bbc9-cc6215398823 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.043730] env[68285]: DEBUG nova.compute.provider_tree [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1306.201405] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892505, 'name': RelocateVM_Task, 'duration_secs': 4.455595} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.201405] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Volume attach. 
Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1306.201405] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581076', 'volume_id': '256c2839-790d-4956-aefd-ad8ce558c59d', 'name': 'volume-256c2839-790d-4956-aefd-ad8ce558c59d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8a598506-724f-48f6-91a8-1e02483e6aab', 'attached_at': '', 'detached_at': '', 'volume_id': '256c2839-790d-4956-aefd-ad8ce558c59d', 'serial': '256c2839-790d-4956-aefd-ad8ce558c59d'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1306.202278] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b52dac-a232-4c65-b31b-3506abd8c897 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.217039] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e76dbcd-00fa-4b57-b0d2-a605393907ed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.242675] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] volume-256c2839-790d-4956-aefd-ad8ce558c59d/volume-256c2839-790d-4956-aefd-ad8ce558c59d.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1306.242978] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f5134417-5d0e-4e7d-9d0d-ae3719f32e82 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.261657] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Waiting for the task: (returnval){ [ 1306.261657] env[68285]: value = "task-2892515" [ 1306.261657] env[68285]: _type = "Task" [ 1306.261657] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.270075] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892515, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.380699] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892514, 'name': ReconfigVM_Task, 'duration_secs': 0.25961} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.381147] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Reconfigured VM instance instance-0000006e to attach disk [datastore1] volume-bb993b4e-ec19-499c-a196-764a30b67abe/volume-bb993b4e-ec19-499c-a196-764a30b67abe.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1306.381315] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance 'faf810ae-7823-4115-a709-99dc7c480867' progress to 50 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1306.478900] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1306.517981] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 8a649b1e-d007-4032-a46c-b479365e5289] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1306.547057] env[68285]: DEBUG nova.scheduler.client.report [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1306.771715] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892515, 'name': ReconfigVM_Task, 'duration_secs': 0.254238} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.772069] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Reconfigured VM instance instance-00000073 to attach disk [datastore2] volume-256c2839-790d-4956-aefd-ad8ce558c59d/volume-256c2839-790d-4956-aefd-ad8ce558c59d.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1306.776803] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea5d76f8-bbd7-4999-b164-176240a03a16 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.791566] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Waiting for the task: (returnval){ [ 1306.791566] env[68285]: value = "task-2892516" [ 1306.791566] env[68285]: _type = "Task" [ 1306.791566] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.799551] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892516, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.887829] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9033883-bc5d-49e7-a4f1-6e5c5b5738bb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.906901] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7e59b4-4793-4002-98cd-ef147ce0c07e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.924153] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance 'faf810ae-7823-4115-a709-99dc7c480867' progress to 67 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1307.006064] env[68285]: DEBUG nova.compute.manager [req-07a538dd-95f1-426f-8dd9-80f66cf2973f req-dd71fefd-abea-43fe-8890-0562c1248514 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Received event network-changed-10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1307.006064] env[68285]: DEBUG nova.compute.manager [req-07a538dd-95f1-426f-8dd9-80f66cf2973f req-dd71fefd-abea-43fe-8890-0562c1248514 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Refreshing instance network info cache due to event network-changed-10199287-9009-48cc-b97a-e94229f7d640. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1307.006064] env[68285]: DEBUG oslo_concurrency.lockutils [req-07a538dd-95f1-426f-8dd9-80f66cf2973f req-dd71fefd-abea-43fe-8890-0562c1248514 service nova] Acquiring lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.006064] env[68285]: DEBUG oslo_concurrency.lockutils [req-07a538dd-95f1-426f-8dd9-80f66cf2973f req-dd71fefd-abea-43fe-8890-0562c1248514 service nova] Acquired lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1307.006434] env[68285]: DEBUG nova.network.neutron [req-07a538dd-95f1-426f-8dd9-80f66cf2973f req-dd71fefd-abea-43fe-8890-0562c1248514 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Refreshing network info cache for port 10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1307.021474] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 49831327-6e13-412e-ab83-bf350e6e9761] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1307.051112] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.329s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1307.051883] env[68285]: DEBUG nova.compute.manager [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1307.054439] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.154s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1307.055883] env[68285]: INFO nova.compute.claims [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1307.303524] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892516, 'name': ReconfigVM_Task, 'duration_secs': 0.270938} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.303822] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581076', 'volume_id': '256c2839-790d-4956-aefd-ad8ce558c59d', 'name': 'volume-256c2839-790d-4956-aefd-ad8ce558c59d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8a598506-724f-48f6-91a8-1e02483e6aab', 'attached_at': '', 'detached_at': '', 'volume_id': '256c2839-790d-4956-aefd-ad8ce558c59d', 'serial': '256c2839-790d-4956-aefd-ad8ce558c59d'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1307.304363] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0f80190-0b13-49e4-8028-d6161935e944 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.310809] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Waiting for the task: (returnval){ [ 1307.310809] env[68285]: value = "task-2892517" [ 1307.310809] env[68285]: _type = "Task" [ 1307.310809] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.318542] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892517, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.524448] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: ef87ff30-ef45-4abb-8696-d5493572703a] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1307.560746] env[68285]: DEBUG nova.compute.utils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1307.564709] env[68285]: DEBUG nova.compute.manager [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1307.564709] env[68285]: DEBUG nova.network.neutron [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1307.629025] env[68285]: DEBUG nova.policy [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '494447cb560a41dd9a3118745ac60554', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75a6837bced940cdaf5743b8e94cce29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1307.813141] env[68285]: DEBUG nova.network.neutron [req-07a538dd-95f1-426f-8dd9-80f66cf2973f req-dd71fefd-abea-43fe-8890-0562c1248514 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Updated VIF entry in instance network info cache for port 10199287-9009-48cc-b97a-e94229f7d640. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1307.813450] env[68285]: DEBUG nova.network.neutron [req-07a538dd-95f1-426f-8dd9-80f66cf2973f req-dd71fefd-abea-43fe-8890-0562c1248514 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Updating instance_info_cache with network_info: [{"id": "10199287-9009-48cc-b97a-e94229f7d640", "address": "fa:16:3e:46:49:64", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10199287-90", "ovs_interfaceid": "10199287-9009-48cc-b97a-e94229f7d640", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.824574] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892517, 'name': Rename_Task, 'duration_secs': 0.295707} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.824834] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1307.825107] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ec1732e-0171-4d0e-95d7-16d473fee439 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.831361] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Waiting for the task: (returnval){ [ 1307.831361] env[68285]: value = "task-2892518" [ 1307.831361] env[68285]: _type = "Task" [ 1307.831361] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.838706] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892518, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.984869] env[68285]: DEBUG nova.network.neutron [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Successfully created port: efe1cc65-a9a1-4768-81db-53da716df13a {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1308.027563] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 68aee959-4168-43a7-a8d1-e6e126a52da5] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1308.065763] env[68285]: DEBUG nova.compute.manager [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1308.278142] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450f588a-bbb6-49c7-805c-c2346ecfde86 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.286189] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81934676-96d0-45a5-b461-e21c33ccfda4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.317639] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994255b2-85dd-43f7-8a91-4106c8bc94d2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.320265] env[68285]: DEBUG oslo_concurrency.lockutils [req-07a538dd-95f1-426f-8dd9-80f66cf2973f req-dd71fefd-abea-43fe-8890-0562c1248514 service nova] Releasing lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1308.325500] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa87ae1-e404-4d72-8bc9-6cb0d64178f3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.339030] env[68285]: DEBUG nova.compute.provider_tree [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1308.347518] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892518, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.531692] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: fe6c495f-6917-4e3d-acce-7487a45e3ef4] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1308.585119] env[68285]: DEBUG nova.network.neutron [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Port c311cfc8-4f78-4068-8841-8aa0ce5243c2 binding to destination host cpu-1 is already ACTIVE {{(pid=68285) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1308.844209] env[68285]: DEBUG nova.scheduler.client.report [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1308.850248] env[68285]: DEBUG oslo_vmware.api [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892518, 'name': PowerOnVM_Task, 'duration_secs': 0.839156} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.850702] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1308.850905] env[68285]: INFO nova.compute.manager [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Took 8.38 seconds to spawn the instance on the hypervisor. 
[ 1308.851095] env[68285]: DEBUG nova.compute.manager [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1308.851862] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa15ae7-bfd9-4bc1-a53a-6e35fe41cba3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.034761] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 2eec5d74-b1b8-4714-aaf1-687ec56ad860] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1309.078314] env[68285]: DEBUG nova.compute.manager [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1309.105173] env[68285]: DEBUG nova.virt.hardware [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1309.105435] env[68285]: DEBUG nova.virt.hardware [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1309.105594] env[68285]: DEBUG nova.virt.hardware [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1309.105773] env[68285]: DEBUG nova.virt.hardware [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1309.105917] env[68285]: DEBUG nova.virt.hardware [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image pref 0:0:0 {{(pid=68285) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1309.106077] env[68285]: DEBUG nova.virt.hardware [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1309.106287] env[68285]: DEBUG nova.virt.hardware [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1309.106448] env[68285]: DEBUG nova.virt.hardware [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1309.106614] env[68285]: DEBUG nova.virt.hardware [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1309.106775] env[68285]: DEBUG nova.virt.hardware [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1309.106943] env[68285]: DEBUG nova.virt.hardware [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1309.107798] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fff363a-4485-479c-a8e4-087b3547c874 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.116563] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f98768-635f-47c7-a501-79e25eabdb1c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.335887] env[68285]: DEBUG nova.compute.manager [req-91d18f74-c1a7-4147-8024-339a0e5452c7 req-bc5d3618-b16b-431a-8b52-6de4352d4018 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Received event network-vif-plugged-efe1cc65-a9a1-4768-81db-53da716df13a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1309.336150] env[68285]: DEBUG oslo_concurrency.lockutils [req-91d18f74-c1a7-4147-8024-339a0e5452c7 req-bc5d3618-b16b-431a-8b52-6de4352d4018 service nova] Acquiring lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.336357] env[68285]: 
DEBUG oslo_concurrency.lockutils [req-91d18f74-c1a7-4147-8024-339a0e5452c7 req-bc5d3618-b16b-431a-8b52-6de4352d4018 service nova] Lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.336524] env[68285]: DEBUG oslo_concurrency.lockutils [req-91d18f74-c1a7-4147-8024-339a0e5452c7 req-bc5d3618-b16b-431a-8b52-6de4352d4018 service nova] Lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1309.336888] env[68285]: DEBUG nova.compute.manager [req-91d18f74-c1a7-4147-8024-339a0e5452c7 req-bc5d3618-b16b-431a-8b52-6de4352d4018 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] No waiting events found dispatching network-vif-plugged-efe1cc65-a9a1-4768-81db-53da716df13a {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1309.337141] env[68285]: WARNING nova.compute.manager [req-91d18f74-c1a7-4147-8024-339a0e5452c7 req-bc5d3618-b16b-431a-8b52-6de4352d4018 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Received unexpected event network-vif-plugged-efe1cc65-a9a1-4768-81db-53da716df13a for instance with vm_state building and task_state spawning. [ 1309.352114] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.298s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1309.352514] env[68285]: DEBUG nova.compute.manager [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1309.354985] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.876s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.359030] env[68285]: DEBUG nova.objects.instance [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lazy-loading 'resources' on Instance uuid 0329a534-0ba1-48df-aa9a-01d50bafab05 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1309.368824] env[68285]: INFO nova.compute.manager [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Took 18.71 seconds to build instance. 
[ 1309.425142] env[68285]: DEBUG nova.network.neutron [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Successfully updated port: efe1cc65-a9a1-4768-81db-53da716df13a {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1309.538461] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 9175fd25-a00c-4a2c-b779-56e6541dcaa1] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1309.608462] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "faf810ae-7823-4115-a709-99dc7c480867-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.608674] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "faf810ae-7823-4115-a709-99dc7c480867-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.608849] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "faf810ae-7823-4115-a709-99dc7c480867-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1309.834931] env[68285]: DEBUG oslo_concurrency.lockutils [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "852ab501-00a6-442b-804a-1bbf49a2be8c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.835213] env[68285]: DEBUG oslo_concurrency.lockutils [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.857683] env[68285]: DEBUG nova.compute.utils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1309.861949] env[68285]: DEBUG nova.compute.manager [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 
c341075b-9d30-45db-9d83-f196bf90ecd3] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1309.862141] env[68285]: DEBUG nova.network.neutron [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1309.871341] env[68285]: DEBUG oslo_concurrency.lockutils [None req-533b0a0c-4f80-4789-af65-fa844a077548 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Lock "8a598506-724f-48f6-91a8-1e02483e6aab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.236s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1309.900999] env[68285]: DEBUG nova.policy [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5ee32979c0f43a2871e145e459e4240', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f732a9946001482bb76dee4e2cf844c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1309.927354] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.927501] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1309.927649] env[68285]: DEBUG nova.network.neutron [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1310.041050] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 08c50ae2-a5fd-4b68-bdd8-1a3768b5de8d] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1310.102732] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1b637a-b320-4131-be93-d93f7b9067a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.112352] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-8357ec31-1798-4baa-90ee-10fbe9dbb96f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.149324] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251fcca8-e288-43f9-a2f3-bd556d06c5d6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.164492] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71ea14d-5adb-4226-b1c3-501ad2256f0b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.170534] env[68285]: DEBUG nova.compute.manager [req-dddc6256-94bd-4b52-a070-53f7f31123ef req-e2230166-2871-42be-8398-a9dbd485b199 service nova] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Received event network-changed-d215cd64-22e2-46be-88b2-f3185156486b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1310.170725] env[68285]: DEBUG nova.compute.manager [req-dddc6256-94bd-4b52-a070-53f7f31123ef req-e2230166-2871-42be-8398-a9dbd485b199 service nova] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Refreshing instance network info cache due to event network-changed-d215cd64-22e2-46be-88b2-f3185156486b. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1310.170933] env[68285]: DEBUG oslo_concurrency.lockutils [req-dddc6256-94bd-4b52-a070-53f7f31123ef req-e2230166-2871-42be-8398-a9dbd485b199 service nova] Acquiring lock "refresh_cache-8a598506-724f-48f6-91a8-1e02483e6aab" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.171215] env[68285]: DEBUG oslo_concurrency.lockutils [req-dddc6256-94bd-4b52-a070-53f7f31123ef req-e2230166-2871-42be-8398-a9dbd485b199 service nova] Acquired lock "refresh_cache-8a598506-724f-48f6-91a8-1e02483e6aab" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1310.171401] env[68285]: DEBUG nova.network.neutron [req-dddc6256-94bd-4b52-a070-53f7f31123ef req-e2230166-2871-42be-8398-a9dbd485b199 service nova] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Refreshing network info cache for port d215cd64-22e2-46be-88b2-f3185156486b {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1310.183427] env[68285]: DEBUG nova.compute.provider_tree [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1310.339972] env[68285]: DEBUG nova.compute.utils [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1310.365055] env[68285]: DEBUG nova.compute.manager [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1310.399322] env[68285]: DEBUG nova.network.neutron [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Successfully created port: e08da820-30b8-48ec-b099-d1f963c95d5e {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1310.472422] env[68285]: DEBUG nova.network.neutron [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1310.545761] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: fe8e0a71-e9b0-4035-a696-51455d6fc473] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1310.673214] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.673428] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1310.673818] env[68285]: DEBUG nova.network.neutron [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1310.686278] env[68285]: DEBUG nova.network.neutron [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updating instance_info_cache with network_info: [{"id": "efe1cc65-a9a1-4768-81db-53da716df13a", "address": "fa:16:3e:91:d9:2c", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefe1cc65-a9", "ovs_interfaceid": 
"efe1cc65-a9a1-4768-81db-53da716df13a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.687903] env[68285]: DEBUG nova.scheduler.client.report [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1310.841641] env[68285]: DEBUG oslo_concurrency.lockutils [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1310.980858] env[68285]: DEBUG nova.network.neutron [req-dddc6256-94bd-4b52-a070-53f7f31123ef req-e2230166-2871-42be-8398-a9dbd485b199 service nova] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Updated VIF entry in instance network info cache for port d215cd64-22e2-46be-88b2-f3185156486b. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1310.981265] env[68285]: DEBUG nova.network.neutron [req-dddc6256-94bd-4b52-a070-53f7f31123ef req-e2230166-2871-42be-8398-a9dbd485b199 service nova] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Updating instance_info_cache with network_info: [{"id": "d215cd64-22e2-46be-88b2-f3185156486b", "address": "fa:16:3e:8f:74:88", "network": {"id": "c9e39941-7f0e-4dda-8088-ed1daee93ef3", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-530210684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f627acd23dc4889a2d0ca8dd1ea865a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd215cd64-22", "ovs_interfaceid": "d215cd64-22e2-46be-88b2-f3185156486b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.049366] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 75b9c202-b50d-4c59-b3ef-03e61225a1dc] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1311.193043] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1311.193043] env[68285]: DEBUG nova.compute.manager [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Instance network_info: |[{"id": "efe1cc65-a9a1-4768-81db-53da716df13a", "address": "fa:16:3e:91:d9:2c", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tapefe1cc65-a9", "ovs_interfaceid": "efe1cc65-a9a1-4768-81db-53da716df13a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1311.193281] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.838s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1311.195481] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:d9:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'efe1cc65-a9a1-4768-81db-53da716df13a', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1311.203214] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1311.203960] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1311.204208] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f740437f-7ed4-4b49-8d1a-925806b0f808 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.226741] env[68285]: INFO nova.scheduler.client.report [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleted allocations for instance 0329a534-0ba1-48df-aa9a-01d50bafab05 [ 1311.230084] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1311.230084] env[68285]: value = "task-2892519" [ 1311.230084] env[68285]: _type = "Task" [ 1311.230084] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.240621] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892519, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.366926] env[68285]: DEBUG nova.compute.manager [req-3eefd0cf-7ccd-4cb1-b590-503cd232decb req-a6d86dee-3bf2-47c7-a4f5-e97910824ed4 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Received event network-changed-efe1cc65-a9a1-4768-81db-53da716df13a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1311.367216] env[68285]: DEBUG nova.compute.manager [req-3eefd0cf-7ccd-4cb1-b590-503cd232decb req-a6d86dee-3bf2-47c7-a4f5-e97910824ed4 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Refreshing instance network info cache due to event network-changed-efe1cc65-a9a1-4768-81db-53da716df13a. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1311.367444] env[68285]: DEBUG oslo_concurrency.lockutils [req-3eefd0cf-7ccd-4cb1-b590-503cd232decb req-a6d86dee-3bf2-47c7-a4f5-e97910824ed4 service nova] Acquiring lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.367597] env[68285]: DEBUG oslo_concurrency.lockutils [req-3eefd0cf-7ccd-4cb1-b590-503cd232decb req-a6d86dee-3bf2-47c7-a4f5-e97910824ed4 service nova] Acquired lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.368258] env[68285]: DEBUG nova.network.neutron [req-3eefd0cf-7ccd-4cb1-b590-503cd232decb req-a6d86dee-3bf2-47c7-a4f5-e97910824ed4 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Refreshing network info cache for port efe1cc65-a9a1-4768-81db-53da716df13a {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1311.371727] env[68285]: DEBUG nova.compute.manager [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1311.406115] env[68285]: DEBUG nova.virt.hardware [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1311.406410] env[68285]: DEBUG nova.virt.hardware [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1311.406587] env[68285]: DEBUG nova.virt.hardware [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1311.406778] env[68285]: DEBUG nova.virt.hardware [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1311.406950] env[68285]: DEBUG nova.virt.hardware [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1311.407252] env[68285]: DEBUG nova.virt.hardware [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1311.407539] env[68285]: DEBUG nova.virt.hardware [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1311.407852] env[68285]: DEBUG nova.virt.hardware [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1311.408132] env[68285]: DEBUG nova.virt.hardware [None 
req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1311.408410] env[68285]: DEBUG nova.virt.hardware [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1311.408631] env[68285]: DEBUG nova.virt.hardware [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1311.410201] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5bed98-d337-4e0e-93ad-8c9d3db044f5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.420640] env[68285]: DEBUG nova.network.neutron [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance_info_cache with network_info: [{"id": "c311cfc8-4f78-4068-8841-8aa0ce5243c2", "address": "fa:16:3e:12:c5:35", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc311cfc8-4f", "ovs_interfaceid": "c311cfc8-4f78-4068-8841-8aa0ce5243c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.425418] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e8caa8-2d49-488a-b64a-3231e272f8e8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.436143] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1311.483687] 
env[68285]: DEBUG oslo_concurrency.lockutils [req-dddc6256-94bd-4b52-a070-53f7f31123ef req-e2230166-2871-42be-8398-a9dbd485b199 service nova] Releasing lock "refresh_cache-8a598506-724f-48f6-91a8-1e02483e6aab" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1311.552579] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 5abddda1-9bf7-4039-81c7-8622f43cc72e] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1311.739035] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4152f32c-8bc3-47bf-92ba-3c2403fd02ba tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "0329a534-0ba1-48df-aa9a-01d50bafab05" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.665s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1311.744040] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892519, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.946894] env[68285]: DEBUG oslo_concurrency.lockutils [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "852ab501-00a6-442b-804a-1bbf49a2be8c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1311.946894] env[68285]: DEBUG oslo_concurrency.lockutils [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1311.946894] env[68285]: INFO nova.compute.manager [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Attaching volume 6bb48a0e-a10d-4be0-a276-644bf6d1632d to /dev/sdb [ 1311.950537] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d219e79f-8957-40bd-aea4-fd654087cbc5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.962229] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05695d03-fb6c-4e0a-9f70-a0322e27932b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.991618] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de801ea2-0001-4a36-ae77-5e61b3f34ed0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.991618] env[68285]: DEBUG nova.network.neutron [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 
c341075b-9d30-45db-9d83-f196bf90ecd3] Successfully updated port: e08da820-30b8-48ec-b099-d1f963c95d5e {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1311.997340] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294d3900-6f41-4fb2-9b86-8af6c34cdfe3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.013563] env[68285]: DEBUG nova.virt.block_device [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Updating existing volume attachment record: b3a23900-bbfd-46bd-bece-0ea28e29afd5 {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1312.055647] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 8fd23cb4-45da-4bd9-a258-845eb3f6a1dc] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1312.241980] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892519, 'name': CreateVM_Task, 'duration_secs': 0.556816} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.242142] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1312.242889] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.243099] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1312.243583] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1312.243871] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95af7605-d765-4a7b-b460-0e12a530baf4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.248697] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1312.248697] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5231b859-3f28-230c-1c01-00921149786c" [ 1312.248697] env[68285]: _type = "Task" [ 1312.248697] 
env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.256378] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5231b859-3f28-230c-1c01-00921149786c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.257206] env[68285]: DEBUG nova.network.neutron [req-3eefd0cf-7ccd-4cb1-b590-503cd232decb req-a6d86dee-3bf2-47c7-a4f5-e97910824ed4 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updated VIF entry in instance network info cache for port efe1cc65-a9a1-4768-81db-53da716df13a. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1312.257929] env[68285]: DEBUG nova.network.neutron [req-3eefd0cf-7ccd-4cb1-b590-503cd232decb req-a6d86dee-3bf2-47c7-a4f5-e97910824ed4 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updating instance_info_cache with network_info: [{"id": "efe1cc65-a9a1-4768-81db-53da716df13a", "address": "fa:16:3e:91:d9:2c", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefe1cc65-a9", "ovs_interfaceid": "efe1cc65-a9a1-4768-81db-53da716df13a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.497953] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "refresh_cache-c341075b-9d30-45db-9d83-f196bf90ecd3" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.498177] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquired lock "refresh_cache-c341075b-9d30-45db-9d83-f196bf90ecd3" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1312.498360] env[68285]: DEBUG nova.network.neutron [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Building network info cache for instance {{(pid=68285) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1312.559863] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 589d1560-9269-4de2-bd79-454ebdaa40d4] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1312.759586] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5231b859-3f28-230c-1c01-00921149786c, 'name': SearchDatastore_Task, 'duration_secs': 0.016041} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.759821] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1312.760062] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1312.760314] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.760482] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1312.760668] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1312.760917] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db11c830-3e93-46dc-a992-8308aa7522f6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.762817] env[68285]: DEBUG oslo_concurrency.lockutils [req-3eefd0cf-7ccd-4cb1-b590-503cd232decb req-a6d86dee-3bf2-47c7-a4f5-e97910824ed4 service nova] Releasing lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1312.769256] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1312.769453] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1312.770174] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c71619c5-6341-4e04-9d2c-ce5e36996bfc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.775295] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1312.775295] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ae5f68-7812-8da8-697c-4a99a4b9ddf6" [ 1312.775295] env[68285]: _type = "Task" [ 1312.775295] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.782337] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ae5f68-7812-8da8-697c-4a99a4b9ddf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.938841] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1312.939082] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1313.029935] env[68285]: DEBUG nova.network.neutron [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1313.062876] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: ce780600-5dc9-4a60-b54e-415cd1766ffb] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1313.090726] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f37af9d-a272-4e8a-b815-409bfb7077c4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.113985] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5e3afb-4abb-4996-a55c-a859c26f6999 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.120033] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance 'faf810ae-7823-4115-a709-99dc7c480867' progress to 83 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1313.185196] env[68285]: DEBUG nova.network.neutron [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Updating instance_info_cache with network_info: [{"id": "e08da820-30b8-48ec-b099-d1f963c95d5e", "address": "fa:16:3e:86:fd:df", "network": {"id": "5a60e0fe-6186-4391-93ec-abfcb9af9900", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1879073887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f732a9946001482bb76dee4e2cf844c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape08da820-30", "ovs_interfaceid": "e08da820-30b8-48ec-b099-d1f963c95d5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.285384] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ae5f68-7812-8da8-697c-4a99a4b9ddf6, 'name': SearchDatastore_Task, 'duration_secs': 0.008225} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.286114] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0c0bc10-6ca1-49f0-9982-e76d8eea0d8f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.291389] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1313.291389] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5247a49b-dbe0-a00d-df6b-fbea17922649" [ 1313.291389] env[68285]: _type = "Task" [ 1313.291389] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.298523] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5247a49b-dbe0-a00d-df6b-fbea17922649, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.394583] env[68285]: DEBUG nova.compute.manager [req-1e3ac5cf-f7f4-443b-b808-9dc9daeea2bf req-8e6623ba-6b58-4bbd-bb80-40529fa13cb6 service nova] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Received event network-vif-plugged-e08da820-30b8-48ec-b099-d1f963c95d5e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1313.394583] env[68285]: DEBUG oslo_concurrency.lockutils [req-1e3ac5cf-f7f4-443b-b808-9dc9daeea2bf req-8e6623ba-6b58-4bbd-bb80-40529fa13cb6 service nova] Acquiring lock "c341075b-9d30-45db-9d83-f196bf90ecd3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1313.394583] env[68285]: DEBUG oslo_concurrency.lockutils [req-1e3ac5cf-f7f4-443b-b808-9dc9daeea2bf req-8e6623ba-6b58-4bbd-bb80-40529fa13cb6 service nova] Lock "c341075b-9d30-45db-9d83-f196bf90ecd3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1313.394583] env[68285]: DEBUG oslo_concurrency.lockutils [req-1e3ac5cf-f7f4-443b-b808-9dc9daeea2bf req-8e6623ba-6b58-4bbd-bb80-40529fa13cb6 service nova] Lock "c341075b-9d30-45db-9d83-f196bf90ecd3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1313.394869] env[68285]: DEBUG nova.compute.manager [req-1e3ac5cf-f7f4-443b-b808-9dc9daeea2bf req-8e6623ba-6b58-4bbd-bb80-40529fa13cb6 service nova] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] No waiting events found dispatching network-vif-plugged-e08da820-30b8-48ec-b099-d1f963c95d5e {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1313.394869] env[68285]: WARNING nova.compute.manager [req-1e3ac5cf-f7f4-443b-b808-9dc9daeea2bf req-8e6623ba-6b58-4bbd-bb80-40529fa13cb6 service nova] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Received unexpected event 
network-vif-plugged-e08da820-30b8-48ec-b099-d1f963c95d5e for instance with vm_state building and task_state spawning. [ 1313.395035] env[68285]: DEBUG nova.compute.manager [req-1e3ac5cf-f7f4-443b-b808-9dc9daeea2bf req-8e6623ba-6b58-4bbd-bb80-40529fa13cb6 service nova] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Received event network-changed-e08da820-30b8-48ec-b099-d1f963c95d5e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1313.395202] env[68285]: DEBUG nova.compute.manager [req-1e3ac5cf-f7f4-443b-b808-9dc9daeea2bf req-8e6623ba-6b58-4bbd-bb80-40529fa13cb6 service nova] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Refreshing instance network info cache due to event network-changed-e08da820-30b8-48ec-b099-d1f963c95d5e. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1313.395367] env[68285]: DEBUG oslo_concurrency.lockutils [req-1e3ac5cf-f7f4-443b-b808-9dc9daeea2bf req-8e6623ba-6b58-4bbd-bb80-40529fa13cb6 service nova] Acquiring lock "refresh_cache-c341075b-9d30-45db-9d83-f196bf90ecd3" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.441453] env[68285]: DEBUG nova.compute.manager [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1313.566430] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 3094ed52-33c2-40ff-ac77-6bb975a2f681] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1313.626204] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1313.626528] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7688bef1-3d1b-4a2e-8861-d784232d2b4e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.634200] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1313.634200] env[68285]: value = "task-2892523" [ 1313.634200] env[68285]: _type = "Task" [ 1313.634200] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.642244] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892523, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.688127] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Releasing lock "refresh_cache-c341075b-9d30-45db-9d83-f196bf90ecd3" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1313.688478] env[68285]: DEBUG nova.compute.manager [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Instance network_info: |[{"id": "e08da820-30b8-48ec-b099-d1f963c95d5e", "address": "fa:16:3e:86:fd:df", "network": {"id": "5a60e0fe-6186-4391-93ec-abfcb9af9900", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1879073887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f732a9946001482bb76dee4e2cf844c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape08da820-30", "ovs_interfaceid": "e08da820-30b8-48ec-b099-d1f963c95d5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1313.688790] env[68285]: DEBUG oslo_concurrency.lockutils [req-1e3ac5cf-f7f4-443b-b808-9dc9daeea2bf req-8e6623ba-6b58-4bbd-bb80-40529fa13cb6 service nova] Acquired lock "refresh_cache-c341075b-9d30-45db-9d83-f196bf90ecd3" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1313.688967] env[68285]: DEBUG nova.network.neutron [req-1e3ac5cf-f7f4-443b-b808-9dc9daeea2bf req-8e6623ba-6b58-4bbd-bb80-40529fa13cb6 service nova] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Refreshing network info cache for port e08da820-30b8-48ec-b099-d1f963c95d5e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1313.690263] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:fd:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f996252-e329-42bd-a897-446dfe2b81cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e08da820-30b8-48ec-b099-d1f963c95d5e', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1313.697717] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Creating folder: Project 
(f732a9946001482bb76dee4e2cf844c2). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1313.698711] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aab5fceb-c69f-455a-bde6-44784e94c55e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.710215] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Created folder: Project (f732a9946001482bb76dee4e2cf844c2) in parent group-v580775. [ 1313.710448] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Creating folder: Instances. Parent ref: group-v581087. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1313.710670] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50743184-16e2-4291-a59a-8acf03befa61 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.719153] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Created folder: Instances in parent group-v581087. [ 1313.719368] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1313.719546] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1313.719733] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ed3118d-ee5b-45e1-bd18-e83633a9f6dc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.738616] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1313.738616] env[68285]: value = "task-2892526" [ 1313.738616] env[68285]: _type = "Task" [ 1313.738616] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.746051] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892526, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.804502] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5247a49b-dbe0-a00d-df6b-fbea17922649, 'name': SearchDatastore_Task, 'duration_secs': 0.009481} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.804906] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1313.805376] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 9ddeb48e-ef72-4e6e-9058-d45ebde7583e/9ddeb48e-ef72-4e6e-9058-d45ebde7583e.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1313.805765] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1d091f6-1216-46a6-aa71-476c2baf89d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.813314] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1313.813314] env[68285]: value = "task-2892527" [ 1313.813314] env[68285]: _type = "Task" [ 1313.813314] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.821834] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892527, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.965819] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1313.966128] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1313.968071] env[68285]: INFO nova.compute.claims [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1314.070516] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 15fd3159-0fff-461d-96ce-f8cfc04eff32] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1314.146071] env[68285]: DEBUG oslo_vmware.api [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892523, 'name': PowerOnVM_Task, 'duration_secs': 0.43161} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.146071] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1314.146290] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a273ed9-587a-455c-8c72-1cc0749cab96 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance 'faf810ae-7823-4115-a709-99dc7c480867' progress to 100 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1314.248920] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892526, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.322709] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892527, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.465084] env[68285]: DEBUG nova.network.neutron [req-1e3ac5cf-f7f4-443b-b808-9dc9daeea2bf req-8e6623ba-6b58-4bbd-bb80-40529fa13cb6 service nova] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Updated VIF entry in instance network info cache for port e08da820-30b8-48ec-b099-d1f963c95d5e. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1314.465561] env[68285]: DEBUG nova.network.neutron [req-1e3ac5cf-f7f4-443b-b808-9dc9daeea2bf req-8e6623ba-6b58-4bbd-bb80-40529fa13cb6 service nova] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Updating instance_info_cache with network_info: [{"id": "e08da820-30b8-48ec-b099-d1f963c95d5e", "address": "fa:16:3e:86:fd:df", "network": {"id": "5a60e0fe-6186-4391-93ec-abfcb9af9900", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1879073887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f732a9946001482bb76dee4e2cf844c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape08da820-30", "ovs_interfaceid": "e08da820-30b8-48ec-b099-d1f963c95d5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.574222] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 7790f1e6-c73f-40d6-97af-00e9c518a09c] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1314.749058] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892526, 'name': CreateVM_Task, 'duration_secs': 0.554259} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.749058] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1314.749607] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.749780] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1314.750118] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1314.750386] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2019e739-dadc-443a-bf44-1a177cdeeca5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.754755] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1314.754755] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]520309fc-95eb-fb1a-4a36-c7091546316b" [ 1314.754755] env[68285]: _type = "Task" [ 1314.754755] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.762453] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520309fc-95eb-fb1a-4a36-c7091546316b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.823769] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892527, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557616} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.824114] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 9ddeb48e-ef72-4e6e-9058-d45ebde7583e/9ddeb48e-ef72-4e6e-9058-d45ebde7583e.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1314.824287] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1314.824528] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f2e300f-633b-4a05-93cd-3ac8f61ce3b3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.831391] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1314.831391] env[68285]: value = "task-2892529" [ 1314.831391] env[68285]: _type = "Task" [ 1314.831391] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.839148] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892529, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.968601] env[68285]: DEBUG oslo_concurrency.lockutils [req-1e3ac5cf-f7f4-443b-b808-9dc9daeea2bf req-8e6623ba-6b58-4bbd-bb80-40529fa13cb6 service nova] Releasing lock "refresh_cache-c341075b-9d30-45db-9d83-f196bf90ecd3" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1315.078335] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: ed25ab5e-6377-4bf6-863e-9ffc3fe66bcf] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1315.188697] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934f325e-290a-4ae1-9ac7-2808e57f60ae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.198608] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4d1bb5-aa36-4597-95af-6f842f13b74d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.229487] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c926e206-3e8d-4b36-a94b-13fc4cad326f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.237512] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28809266-5773-467d-a4c9-cfc3683ef440 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.251247] env[68285]: DEBUG nova.compute.provider_tree [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1315.264031] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520309fc-95eb-fb1a-4a36-c7091546316b, 'name': SearchDatastore_Task, 'duration_secs': 0.013084} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.264311] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1315.264537] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1315.264760] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.264902] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1315.265092] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1315.265327] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f702f5a-5633-42b9-9250-1b2a15874b96 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.281415] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1315.281581] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1315.282286] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd773377-85bd-462f-b467-d35e0cbfa94c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.287159] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1315.287159] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]526fddfb-0d84-9e64-0378-c76d00fe0a8a" [ 1315.287159] env[68285]: _type = "Task" [ 1315.287159] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.293791] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]526fddfb-0d84-9e64-0378-c76d00fe0a8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.339826] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892529, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064365} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.340099] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1315.340834] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f709bc3-2e43-43a3-b964-ebe8594495a4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.363867] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] 9ddeb48e-ef72-4e6e-9058-d45ebde7583e/9ddeb48e-ef72-4e6e-9058-d45ebde7583e.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1315.364169] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a57445e-a359-469f-97ef-c69086b1aea8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.383831] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1315.383831] env[68285]: value = "task-2892530" [ 1315.383831] env[68285]: _type = "Task" [ 1315.383831] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.392116] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892530, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.582026] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 94652533-8c34-42fa-8d70-4effc307ec71] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1315.670158] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Acquiring lock "51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.671349] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Lock "51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.671349] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Acquiring lock "51bdaa10-0cf3-4052-9f5c-7d4dad565fd6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.671487] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Lock "51bdaa10-0cf3-4052-9f5c-7d4dad565fd6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.671706] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Lock "51bdaa10-0cf3-4052-9f5c-7d4dad565fd6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1315.674151] env[68285]: INFO nova.compute.manager [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Terminating instance [ 1315.754016] env[68285]: DEBUG nova.scheduler.client.report [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed for provider 
7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1315.797046] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]526fddfb-0d84-9e64-0378-c76d00fe0a8a, 'name': SearchDatastore_Task, 'duration_secs': 0.053123} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.797797] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-059a8bd9-6a2e-4dc7-9ae0-1f647c5c7afb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.803622] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1315.803622] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52381abc-6027-f687-b072-8468cb07ac20" [ 1315.803622] env[68285]: _type = "Task" [ 1315.803622] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.810747] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52381abc-6027-f687-b072-8468cb07ac20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.893910] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892530, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.085207] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 8c299247-896d-4ff1-b73a-22a71ec972fd] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1316.177859] env[68285]: DEBUG nova.compute.manager [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1316.178164] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1316.179132] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d947087-9be1-4453-9697-5fc3f36cc4dc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.187610] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1316.187934] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9500faa0-0399-43b4-bec0-4c106dfbc5ea {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.195049] env[68285]: DEBUG oslo_vmware.api [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Waiting for the task: (returnval){ [ 1316.195049] env[68285]: value = "task-2892531" [ 1316.195049] env[68285]: _type = "Task" [ 1316.195049] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.258660] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.292s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1316.259319] env[68285]: DEBUG nova.compute.manager [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1316.315037] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52381abc-6027-f687-b072-8468cb07ac20, 'name': SearchDatastore_Task, 'duration_secs': 0.034872} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.315222] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1316.315322] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] c341075b-9d30-45db-9d83-f196bf90ecd3/c341075b-9d30-45db-9d83-f196bf90ecd3.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1316.315588] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2df4e216-2e0c-4153-9d61-0bed2e3360a5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.327974] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1316.327974] env[68285]: value = "task-2892532" [ 1316.327974] env[68285]: _type = "Task" [ 1316.327974] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.337507] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892532, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.396250] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892530, 'name': ReconfigVM_Task, 'duration_secs': 0.64896} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.396703] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Reconfigured VM instance instance-00000074 to attach disk [datastore2] 9ddeb48e-ef72-4e6e-9058-d45ebde7583e/9ddeb48e-ef72-4e6e-9058-d45ebde7583e.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1316.397370] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3cb3fa02-0356-49f0-91e9-5ec59eb96acf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.405123] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1316.405123] env[68285]: value = "task-2892533" [ 1316.405123] env[68285]: _type = "Task" [ 1316.405123] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.414114] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892533, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.588870] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 1a040977-b57e-4b67-b259-065b788141de] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1316.705568] env[68285]: DEBUG oslo_vmware.api [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892531, 'name': PowerOffVM_Task, 'duration_secs': 0.226959} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.705831] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1316.706013] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1316.706303] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8da36b38-6f21-4895-9777-2acb8bc9d082 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.765396] env[68285]: DEBUG nova.compute.utils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1316.766899] env[68285]: DEBUG nova.compute.manager [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1316.767056] env[68285]: DEBUG nova.network.neutron [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1316.788793] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1316.789073] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1316.789284] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Deleting the datastore file [datastore2] 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1316.790089] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b613919e-2856-4ec6-98ea-aeba1d542a3b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.796776] env[68285]: DEBUG oslo_vmware.api [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f 
tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Waiting for the task: (returnval){ [ 1316.796776] env[68285]: value = "task-2892535" [ 1316.796776] env[68285]: _type = "Task" [ 1316.796776] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.806128] env[68285]: DEBUG oslo_vmware.api [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892535, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.807969] env[68285]: DEBUG nova.policy [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '184360cab7224b9eaef80dfe89d0208b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '288595d9298e43fa859bc6b68054aa08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1316.824019] env[68285]: DEBUG oslo_concurrency.lockutils [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "faf810ae-7823-4115-a709-99dc7c480867" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1316.824404] env[68285]: DEBUG oslo_concurrency.lockutils [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "faf810ae-7823-4115-a709-99dc7c480867" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1316.824681] env[68285]: DEBUG nova.compute.manager [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Going to confirm migration 8 {{(pid=68285) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1316.837707] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892532, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.914610] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892533, 'name': Rename_Task, 'duration_secs': 0.245913} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.914875] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1316.915142] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52d064e8-4e59-4fa2-bd61-2fcd4996657d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.920872] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1316.920872] env[68285]: value = "task-2892536" [ 1316.920872] env[68285]: _type = "Task" [ 1316.920872] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.928298] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892536, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.043020] env[68285]: DEBUG nova.network.neutron [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Successfully created port: 75725a79-82bc-49ae-a645-d04ed26d28a7 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1317.060448] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Volume attach. 
Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1317.060741] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581086', 'volume_id': '6bb48a0e-a10d-4be0-a276-644bf6d1632d', 'name': 'volume-6bb48a0e-a10d-4be0-a276-644bf6d1632d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '852ab501-00a6-442b-804a-1bbf49a2be8c', 'attached_at': '', 'detached_at': '', 'volume_id': '6bb48a0e-a10d-4be0-a276-644bf6d1632d', 'serial': '6bb48a0e-a10d-4be0-a276-644bf6d1632d'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1317.061587] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d87bfa-e827-4372-ac9c-36fb292f9baf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.078295] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3568e05b-2a87-4b82-8b5e-17501bbe46f8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.097380] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 8ebbf943-2cef-4c99-a1c4-b1d213fd9884] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1317.107750] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] volume-6bb48a0e-a10d-4be0-a276-644bf6d1632d/volume-6bb48a0e-a10d-4be0-a276-644bf6d1632d.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1317.108550] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a6bf34f-7ade-48fb-819a-ee313206411d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.127177] env[68285]: DEBUG oslo_vmware.api [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1317.127177] env[68285]: value = "task-2892537" [ 1317.127177] env[68285]: _type = "Task" [ 1317.127177] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.137811] env[68285]: DEBUG oslo_vmware.api [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892537, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.270785] env[68285]: DEBUG nova.compute.manager [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1317.307495] env[68285]: DEBUG oslo_vmware.api [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892535, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.340958] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892532, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553711} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.341230] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] c341075b-9d30-45db-9d83-f196bf90ecd3/c341075b-9d30-45db-9d83-f196bf90ecd3.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1317.341436] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1317.341684] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d927045e-ac1a-4e17-81fc-2ffd8a3fabe6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.349870] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1317.349870] env[68285]: value = "task-2892538" [ 1317.349870] env[68285]: _type = "Task" [ 1317.349870] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.357958] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892538, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.393943] env[68285]: DEBUG oslo_concurrency.lockutils [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.394189] env[68285]: DEBUG oslo_concurrency.lockutils [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquired lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1317.394433] env[68285]: DEBUG nova.network.neutron [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1317.394687] env[68285]: DEBUG nova.objects.instance [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lazy-loading 'info_cache' on Instance uuid faf810ae-7823-4115-a709-99dc7c480867 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1317.431477] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892536, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.608745] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: d025b807-fda4-4aff-beac-0ad6a092fe74] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1317.638809] env[68285]: DEBUG oslo_vmware.api [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892537, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.807792] env[68285]: DEBUG oslo_vmware.api [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892535, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.859344] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892538, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072082} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.859685] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1317.860491] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d53734-d893-4822-bd06-d51cdf9cb49c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.881997] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] c341075b-9d30-45db-9d83-f196bf90ecd3/c341075b-9d30-45db-9d83-f196bf90ecd3.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1317.882572] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2e417ff-427b-47c8-8ebe-3cae450c710c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.904038] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1317.904038] env[68285]: value = "task-2892539" [ 1317.904038] env[68285]: _type = "Task" [ 1317.904038] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.912571] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892539, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.933724] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892536, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.112522] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: cbf2a387-8a5a-4400-833b-e04e23ca42f7] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1318.139458] env[68285]: DEBUG oslo_vmware.api [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892537, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.280301] env[68285]: DEBUG nova.compute.manager [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1318.306724] env[68285]: DEBUG nova.virt.hardware [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1318.306987] env[68285]: DEBUG nova.virt.hardware [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1318.307161] env[68285]: DEBUG nova.virt.hardware [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1318.307343] env[68285]: DEBUG nova.virt.hardware [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1318.307562] env[68285]: DEBUG nova.virt.hardware [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1318.307636] env[68285]: DEBUG nova.virt.hardware [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1318.307885] env[68285]: DEBUG nova.virt.hardware [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1318.307984] env[68285]: DEBUG nova.virt.hardware [None req-0bac00b7-5581-4055-9955-01be272a0f09 
tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1318.308161] env[68285]: DEBUG nova.virt.hardware [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1318.308321] env[68285]: DEBUG nova.virt.hardware [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1318.308487] env[68285]: DEBUG nova.virt.hardware [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1318.309277] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0870b3bb-3e6c-4d55-8df9-40c0fd91d0a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.314644] env[68285]: DEBUG oslo_vmware.api [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Task: {'id': task-2892535, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.028666} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.315216] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1318.315417] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1318.315598] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1318.315769] env[68285]: INFO nova.compute.manager [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Took 2.14 seconds to destroy the instance on the hypervisor. [ 1318.316009] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1318.316199] env[68285]: DEBUG nova.compute.manager [-] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1318.316293] env[68285]: DEBUG nova.network.neutron [-] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1318.320728] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08082887-6707-4741-a428-2de57a286012 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.421184] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892539, 'name': ReconfigVM_Task, 'duration_secs': 0.239882} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.422481] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Reconfigured VM instance instance-00000075 to attach disk [datastore2] c341075b-9d30-45db-9d83-f196bf90ecd3/c341075b-9d30-45db-9d83-f196bf90ecd3.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1318.423978] env[68285]: DEBUG nova.compute.manager [req-7181129a-6c3a-438c-9b55-c23fd39d282f req-335f241f-9e72-417b-a82d-1f754d5aaab3 service nova] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Received event network-vif-plugged-75725a79-82bc-49ae-a645-d04ed26d28a7 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1318.424199] env[68285]: DEBUG oslo_concurrency.lockutils [req-7181129a-6c3a-438c-9b55-c23fd39d282f req-335f241f-9e72-417b-a82d-1f754d5aaab3 service nova] Acquiring lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1318.424401] env[68285]: DEBUG oslo_concurrency.lockutils [req-7181129a-6c3a-438c-9b55-c23fd39d282f req-335f241f-9e72-417b-a82d-1f754d5aaab3 service nova] Lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1318.424567] env[68285]: DEBUG oslo_concurrency.lockutils [req-7181129a-6c3a-438c-9b55-c23fd39d282f req-335f241f-9e72-417b-a82d-1f754d5aaab3 service nova] Lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.424733] env[68285]: DEBUG nova.compute.manager [req-7181129a-6c3a-438c-9b55-c23fd39d282f req-335f241f-9e72-417b-a82d-1f754d5aaab3 service nova] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] No waiting events found 
dispatching network-vif-plugged-75725a79-82bc-49ae-a645-d04ed26d28a7 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1318.424893] env[68285]: WARNING nova.compute.manager [req-7181129a-6c3a-438c-9b55-c23fd39d282f req-335f241f-9e72-417b-a82d-1f754d5aaab3 service nova] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Received unexpected event network-vif-plugged-75725a79-82bc-49ae-a645-d04ed26d28a7 for instance with vm_state building and task_state spawning. [ 1318.425183] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf1d67dd-8c64-4fd3-b26a-ce6bc62689b0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.436435] env[68285]: DEBUG oslo_vmware.api [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892536, 'name': PowerOnVM_Task, 'duration_secs': 1.352245} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.437582] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1318.437783] env[68285]: INFO nova.compute.manager [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Took 9.36 seconds to spawn the instance on the hypervisor. [ 1318.437964] env[68285]: DEBUG nova.compute.manager [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1318.438288] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1318.438288] env[68285]: value = "task-2892540" [ 1318.438288] env[68285]: _type = "Task" [ 1318.438288] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.438943] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8c60fe-71b3-46bd-a075-311f918d2d3e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.452858] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892540, 'name': Rename_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.499872] env[68285]: DEBUG nova.network.neutron [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Successfully updated port: 75725a79-82bc-49ae-a645-d04ed26d28a7 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1318.615738] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 9c190abd-23ee-4e8e-8b91-9050847581d5] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1318.640617] env[68285]: DEBUG oslo_vmware.api [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892537, 'name': ReconfigVM_Task, 'duration_secs': 1.060746} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.640617] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Reconfigured VM instance instance-0000006c to attach disk [datastore2] volume-6bb48a0e-a10d-4be0-a276-644bf6d1632d/volume-6bb48a0e-a10d-4be0-a276-644bf6d1632d.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1318.645499] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41ae0d3b-8818-401a-a802-18f76b86813d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.659527] env[68285]: DEBUG oslo_vmware.api [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1318.659527] env[68285]: value = "task-2892541" [ 1318.659527] env[68285]: _type = "Task" [ 1318.659527] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.671160] env[68285]: DEBUG oslo_vmware.api [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892541, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.766525] env[68285]: DEBUG nova.network.neutron [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance_info_cache with network_info: [{"id": "c311cfc8-4f78-4068-8841-8aa0ce5243c2", "address": "fa:16:3e:12:c5:35", "network": {"id": "53ea04ee-4511-482b-bccd-ad5d0afcc49d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1214018874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4471597d3345443aa28b97acd91847e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc311cfc8-4f", "ovs_interfaceid": "c311cfc8-4f78-4068-8841-8aa0ce5243c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.952638] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892540, 'name': Rename_Task, 'duration_secs': 0.148819} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.953579] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1318.953579] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d431723-c840-47a4-9bba-bd6498100aee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.963394] env[68285]: DEBUG nova.compute.manager [req-9abf894d-f3dd-4927-8d2d-c2d301187445 req-5657e70c-4bcc-4612-9dd9-1dfaf0aadfa7 service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Received event network-vif-deleted-c5dfeee8-308e-441b-8f3c-84d8c2738b4e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1318.963613] env[68285]: INFO nova.compute.manager [req-9abf894d-f3dd-4927-8d2d-c2d301187445 req-5657e70c-4bcc-4612-9dd9-1dfaf0aadfa7 service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Neutron deleted interface c5dfeee8-308e-441b-8f3c-84d8c2738b4e; detaching it from the instance and deleting it from the info cache [ 1318.963792] env[68285]: DEBUG nova.network.neutron [req-9abf894d-f3dd-4927-8d2d-c2d301187445 req-5657e70c-4bcc-4612-9dd9-1dfaf0aadfa7 service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.965256] env[68285]: INFO nova.compute.manager [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Took 14.26 seconds to build instance. [ 1318.969812] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1318.969812] env[68285]: value = "task-2892542" [ 1318.969812] env[68285]: _type = "Task" [ 1318.969812] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.976369] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892542, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.002720] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "refresh_cache-e449ac04-e05c-4134-95b3-4bbc45fa26e4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.002823] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "refresh_cache-e449ac04-e05c-4134-95b3-4bbc45fa26e4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1319.003018] env[68285]: DEBUG nova.network.neutron [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1319.121654] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: d0b04097-292a-47e7-8f14-199b1650dc2c] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1319.172200] env[68285]: DEBUG oslo_vmware.api [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892541, 'name': ReconfigVM_Task, 'duration_secs': 0.149198} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.172587] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581086', 'volume_id': '6bb48a0e-a10d-4be0-a276-644bf6d1632d', 'name': 'volume-6bb48a0e-a10d-4be0-a276-644bf6d1632d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '852ab501-00a6-442b-804a-1bbf49a2be8c', 'attached_at': '', 'detached_at': '', 'volume_id': '6bb48a0e-a10d-4be0-a276-644bf6d1632d', 'serial': '6bb48a0e-a10d-4be0-a276-644bf6d1632d'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1319.269757] env[68285]: DEBUG oslo_concurrency.lockutils [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Releasing lock "refresh_cache-faf810ae-7823-4115-a709-99dc7c480867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1319.270135] env[68285]: DEBUG nova.objects.instance [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lazy-loading 'migration_context' on Instance uuid faf810ae-7823-4115-a709-99dc7c480867 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1319.414595] env[68285]: DEBUG 
nova.network.neutron [-] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.467875] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1bb70efc-0cc8-4dd1-9603-7217f3a53dc5 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.771s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.468156] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-09499278-dd76-4e39-9d2d-0ccd6238365a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.482860] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892542, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.484882] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c52b03-484b-439e-92d1-781f0725e93d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.522323] env[68285]: DEBUG nova.compute.manager [req-9abf894d-f3dd-4927-8d2d-c2d301187445 req-5657e70c-4bcc-4612-9dd9-1dfaf0aadfa7 service nova] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Detach interface failed, port_id=c5dfeee8-308e-441b-8f3c-84d8c2738b4e, reason: Instance 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1319.545728] env[68285]: DEBUG nova.network.neutron [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1319.625760] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: bb806297-47c6-45b7-a177-f3300fa1e29a] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1319.699152] env[68285]: DEBUG nova.network.neutron [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Updating instance_info_cache with network_info: [{"id": "75725a79-82bc-49ae-a645-d04ed26d28a7", "address": "fa:16:3e:4b:1a:79", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75725a79-82", "ovs_interfaceid": "75725a79-82bc-49ae-a645-d04ed26d28a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.773176] env[68285]: DEBUG nova.objects.base [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1319.774489] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9effdf9-42ca-449a-9c69-3c79a097486b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.796036] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15b778e7-415b-40c2-8001-5160400164d2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.803860] env[68285]: DEBUG oslo_vmware.api [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1319.803860] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5227c741-b3e9-81aa-4e7d-21db6dbedfb8" [ 1319.803860] env[68285]: _type = "Task" [ 1319.803860] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.812824] env[68285]: DEBUG oslo_vmware.api [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5227c741-b3e9-81aa-4e7d-21db6dbedfb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.917374] env[68285]: INFO nova.compute.manager [-] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Took 1.60 seconds to deallocate network for instance. [ 1319.981216] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892542, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.133465] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: be47df2a-aee7-4275-9acb-9cf74367f503] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1320.202012] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "refresh_cache-e449ac04-e05c-4134-95b3-4bbc45fa26e4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1320.202755] env[68285]: DEBUG nova.compute.manager [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Instance network_info: |[{"id": "75725a79-82bc-49ae-a645-d04ed26d28a7", "address": "fa:16:3e:4b:1a:79", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75725a79-82", "ovs_interfaceid": "75725a79-82bc-49ae-a645-d04ed26d28a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1320.203083] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:1a:79', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51ae336c-12cf-406a-b1ca-54e9ce553b3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75725a79-82bc-49ae-a645-d04ed26d28a7', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1320.210633] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1320.211443] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1320.211922] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-45fb664b-884a-4c35-be0c-7bb06ec13e46 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.227507] env[68285]: DEBUG nova.objects.instance [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lazy-loading 'flavor' on Instance uuid 852ab501-00a6-442b-804a-1bbf49a2be8c {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1320.235395] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1320.235395] env[68285]: value = "task-2892543" [ 1320.235395] env[68285]: _type = "Task" [ 1320.235395] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.243616] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892543, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.313963] env[68285]: DEBUG oslo_vmware.api [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5227c741-b3e9-81aa-4e7d-21db6dbedfb8, 'name': SearchDatastore_Task, 'duration_secs': 0.009963} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.314341] env[68285]: DEBUG oslo_concurrency.lockutils [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.314614] env[68285]: DEBUG oslo_concurrency.lockutils [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.424773] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.468083] env[68285]: DEBUG nova.compute.manager [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Received event network-changed-75725a79-82bc-49ae-a645-d04ed26d28a7 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1320.468353] env[68285]: DEBUG nova.compute.manager [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Refreshing instance network info cache due to event network-changed-75725a79-82bc-49ae-a645-d04ed26d28a7. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1320.468627] env[68285]: DEBUG oslo_concurrency.lockutils [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] Acquiring lock "refresh_cache-e449ac04-e05c-4134-95b3-4bbc45fa26e4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.468833] env[68285]: DEBUG oslo_concurrency.lockutils [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] Acquired lock "refresh_cache-e449ac04-e05c-4134-95b3-4bbc45fa26e4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1320.469083] env[68285]: DEBUG nova.network.neutron [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Refreshing network info cache for port 75725a79-82bc-49ae-a645-d04ed26d28a7 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1320.485733] env[68285]: DEBUG oslo_vmware.api [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892542, 'name': PowerOnVM_Task, 'duration_secs': 1.113055} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.486079] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1320.486357] env[68285]: INFO nova.compute.manager [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Took 9.11 seconds to spawn the instance on the hypervisor. [ 1320.486625] env[68285]: DEBUG nova.compute.manager [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1320.487710] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad707103-bdcc-41c8-9dfa-9a3b5cb897d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.640730] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 2e5a2839-3cdf-436d-89eb-5d6f83c3bf81] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1320.696114] env[68285]: INFO nova.compute.manager [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Rescuing [ 1320.696114] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.696114] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1320.696114] env[68285]: DEBUG nova.network.neutron [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1320.732944] env[68285]: DEBUG oslo_concurrency.lockutils [None req-22260f24-e58b-4ff7-9628-b9683da94145 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.786s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} 
[ 1320.745746] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892543, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.009419] env[68285]: INFO nova.compute.manager [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Took 16.12 seconds to build instance. [ 1321.053692] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057af84f-2873-4c50-bc2f-e12e0f530391 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.065173] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa54db0a-c653-443f-ad90-1736e1da598b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.101969] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9a9edb-4f06-4eb0-b3f7-677796e1fd55 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.110222] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a6aa8f-0200-419c-8d25-48548b8aa81b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.124601] env[68285]: DEBUG nova.compute.provider_tree [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1321.143590] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 1f5fe064-0443-4b7f-911a-45d803836eeb] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1321.245596] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892543, 'name': CreateVM_Task, 'duration_secs': 0.665446} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.245777] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1321.246491] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.246657] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1321.246971] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1321.247247] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7af6fbe1-29f8-4686-a137-0ed5111febab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.251965] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1321.251965] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5293f476-c0ee-9003-8afc-3d1df7938cef" [ 1321.251965] env[68285]: _type = "Task" [ 1321.251965] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.261939] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5293f476-c0ee-9003-8afc-3d1df7938cef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.262836] env[68285]: DEBUG nova.network.neutron [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Updated VIF entry in instance network info cache for port 75725a79-82bc-49ae-a645-d04ed26d28a7. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1321.263225] env[68285]: DEBUG nova.network.neutron [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Updating instance_info_cache with network_info: [{"id": "75725a79-82bc-49ae-a645-d04ed26d28a7", "address": "fa:16:3e:4b:1a:79", "network": {"id": "0756ce43-95d7-436f-9b74-5cdd666adbfd", "bridge": "br-int", "label": "tempest-ServersTestJSON-763693016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "288595d9298e43fa859bc6b68054aa08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51ae336c-12cf-406a-b1ca-54e9ce553b3e", "external-id": "nsx-vlan-transportzone-30", "segmentation_id": 30, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75725a79-82", "ovs_interfaceid": "75725a79-82bc-49ae-a645-d04ed26d28a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.469180] env[68285]: DEBUG nova.network.neutron [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Updating instance_info_cache with network_info: [{"id": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "address": "fa:16:3e:36:84:f9", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4b82f26-ef", "ovs_interfaceid": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.511289] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b185adc1-07a0-4af0-9366-ce7b57a8d110 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "c341075b-9d30-45db-9d83-f196bf90ecd3" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.633s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1321.629782] env[68285]: DEBUG nova.scheduler.client.report [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1321.646240] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 1a7d1cfc-67a5-4178-9bc2-eb8af5104d11] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1321.762900] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5293f476-c0ee-9003-8afc-3d1df7938cef, 'name': SearchDatastore_Task, 'duration_secs': 0.024819} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.763299] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1321.763598] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1321.763909] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.764083] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1321.764320] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Creating directory with 
path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1321.764644] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e250380-0cc5-46fe-9336-e18967dd5e50 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.766974] env[68285]: DEBUG oslo_concurrency.lockutils [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] Releasing lock "refresh_cache-e449ac04-e05c-4134-95b3-4bbc45fa26e4" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1321.767243] env[68285]: DEBUG nova.compute.manager [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Received event network-changed-efe1cc65-a9a1-4768-81db-53da716df13a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1321.767466] env[68285]: DEBUG nova.compute.manager [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Refreshing instance network info cache due to event network-changed-efe1cc65-a9a1-4768-81db-53da716df13a. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1321.767723] env[68285]: DEBUG oslo_concurrency.lockutils [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] Acquiring lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.767877] env[68285]: DEBUG oslo_concurrency.lockutils [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] Acquired lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1321.768050] env[68285]: DEBUG nova.network.neutron [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Refreshing network info cache for port efe1cc65-a9a1-4768-81db-53da716df13a {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1321.776338] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1321.776529] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1321.777257] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c982b84-32ee-4f1e-b04e-a82450825a99 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.782985] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1321.782985] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52bc5d5f-72f6-a6cd-ca7c-899d0858ceff" [ 1321.782985] env[68285]: _type = "Task" [ 1321.782985] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.783296] env[68285]: INFO nova.compute.manager [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Rescuing [ 1321.783557] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "refresh_cache-c341075b-9d30-45db-9d83-f196bf90ecd3" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.783721] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquired lock "refresh_cache-c341075b-9d30-45db-9d83-f196bf90ecd3" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1321.783887] env[68285]: DEBUG nova.network.neutron [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1321.793535] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bc5d5f-72f6-a6cd-ca7c-899d0858ceff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.972085] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1322.149809] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 2a9b3b56-8607-4da8-9186-8a933cfe0351] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1322.297246] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bc5d5f-72f6-a6cd-ca7c-899d0858ceff, 'name': SearchDatastore_Task, 'duration_secs': 0.009892} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.297996] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a578e20-e4f2-4fd7-a9b8-59cf81d359bb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.304865] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1322.304865] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]522e0c85-5dba-b9ec-f8da-4dd84878e2b4" [ 1322.304865] env[68285]: _type = "Task" [ 1322.304865] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.315345] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522e0c85-5dba-b9ec-f8da-4dd84878e2b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.564174] env[68285]: DEBUG nova.network.neutron [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Updating instance_info_cache with network_info: [{"id": "e08da820-30b8-48ec-b099-d1f963c95d5e", "address": "fa:16:3e:86:fd:df", "network": {"id": "5a60e0fe-6186-4391-93ec-abfcb9af9900", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1879073887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f732a9946001482bb76dee4e2cf844c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape08da820-30", "ovs_interfaceid": "e08da820-30b8-48ec-b099-d1f963c95d5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.597736] env[68285]: DEBUG nova.network.neutron [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updated VIF entry in instance network info cache for port efe1cc65-a9a1-4768-81db-53da716df13a. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1322.598109] env[68285]: DEBUG nova.network.neutron [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updating instance_info_cache with network_info: [{"id": "efe1cc65-a9a1-4768-81db-53da716df13a", "address": "fa:16:3e:91:d9:2c", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefe1cc65-a9", "ovs_interfaceid": "efe1cc65-a9a1-4768-81db-53da716df13a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.644508] env[68285]: DEBUG oslo_concurrency.lockutils [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.330s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1322.647340] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.223s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1322.647705] env[68285]: DEBUG nova.objects.instance [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Lazy-loading 'resources' on Instance uuid 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1322.652550] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 1dce61a2-0fe2-4384-835c-7e324446d7cc] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1322.817086] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522e0c85-5dba-b9ec-f8da-4dd84878e2b4, 'name': SearchDatastore_Task, 'duration_secs': 0.038608} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.817424] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1322.817748] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] e449ac04-e05c-4134-95b3-4bbc45fa26e4/e449ac04-e05c-4134-95b3-4bbc45fa26e4.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1322.818220] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b1fcd46-2aad-48e8-8abb-51753eff1ad5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.825234] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1322.825234] env[68285]: value = "task-2892544" [ 1322.825234] env[68285]: _type = "Task" [ 1322.825234] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.833323] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892544, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.066879] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Releasing lock "refresh_cache-c341075b-9d30-45db-9d83-f196bf90ecd3" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1323.100936] env[68285]: DEBUG oslo_concurrency.lockutils [req-0c9fcb68-32cf-44e8-b28a-13f846a363b3 req-db03bf49-8eed-455e-a319-1be649cb0c47 service nova] Releasing lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1323.159650] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 9e81990d-e63e-48a7-8941-f0298ca184b3] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1323.217089] env[68285]: INFO nova.scheduler.client.report [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleted allocation for migration dd2dc971-bb14-469f-bd32-e05d3aade332 [ 1323.335132] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892544, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49678} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.337651] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] e449ac04-e05c-4134-95b3-4bbc45fa26e4/e449ac04-e05c-4134-95b3-4bbc45fa26e4.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1323.337866] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1323.338562] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e27b8986-be19-4d51-8c9d-7274a687199d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.345466] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1323.345466] env[68285]: value = "task-2892545" [ 1323.345466] env[68285]: _type = "Task" [ 1323.345466] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.357063] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892545, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.394949] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4bb9fd-190c-49eb-9d27-f29f4bd38a02 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.403191] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84cd2558-add1-48da-8d5d-db0f82dc590a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.434941] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f0121e-31a8-41f8-b6e0-4d6b747979af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.442512] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36aa9bc7-7dd5-4121-8828-91c2b4b10847 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.455907] env[68285]: DEBUG nova.compute.provider_tree [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1323.509866] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1323.510222] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69a965af-86b2-4afd-b05b-2d5657265006 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.517811] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1323.517811] env[68285]: value = "task-2892546" [ 1323.517811] env[68285]: _type = "Task" [ 1323.517811] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.525827] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892546, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.664018] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 3c71f649-b456-45a0-a113-725a529702a2] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1323.717009] env[68285]: INFO nova.compute.manager [None req-c4642ee9-ea0b-47b0-bfa2-9fd823555200 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Get console output [ 1323.717281] env[68285]: WARNING nova.virt.vmwareapi.driver [None req-c4642ee9-ea0b-47b0-bfa2-9fd823555200 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] The console log is missing. Check your VSPC configuration [ 1323.727350] env[68285]: DEBUG oslo_concurrency.lockutils [None req-619e76c3-d4c8-46bc-8d08-f23969394f84 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "faf810ae-7823-4115-a709-99dc7c480867" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.903s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.856859] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892545, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063858} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.858822] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1323.858822] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34b48a6-8b64-4786-8273-c4aefde5db20 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.881036] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] e449ac04-e05c-4134-95b3-4bbc45fa26e4/e449ac04-e05c-4134-95b3-4bbc45fa26e4.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1323.881630] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-627db5bd-d266-4aa5-8b0d-b17499c7a15a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.901114] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1323.901114] env[68285]: value = "task-2892547" [ 1323.901114] env[68285]: _type = "Task" [ 1323.901114] env[68285]: } 
to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.909040] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892547, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.959806] env[68285]: DEBUG nova.scheduler.client.report [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1324.028116] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892546, 'name': PowerOffVM_Task, 'duration_secs': 0.276598} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.028399] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1324.029248] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f599d1-4186-47ff-8a03-eeb7a7660707 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.050384] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08261a07-f16d-48df-9867-9ecaf11a6ab0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.083014] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1324.083315] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-477aeef3-6fef-4d3d-8b5b-6c28e3df4919 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.089879] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1324.089879] env[68285]: value = "task-2892548" [ 1324.089879] env[68285]: _type = "Task" [ 1324.089879] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.101190] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892548, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.167385] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 64103f25-6411-44be-a60f-b9c276dba331] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1324.411090] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892547, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.465631] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.818s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1324.489897] env[68285]: INFO nova.scheduler.client.report [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Deleted allocations for instance 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6 [ 1324.601735] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1324.602048] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1324.602237] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1324.602501] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1324.602692] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 
tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1324.602841] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1324.603094] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3fae26d9-a6fe-4a7a-aaac-e9bc1ac8203a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.604639] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-888dcadf-1895-49f4-8575-a5605503debc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.613104] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1324.613104] env[68285]: value = "task-2892549" [ 1324.613104] env[68285]: _type = "Task" [ 1324.613104] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.616956] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1324.617141] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1324.618109] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1208884d-5470-45f1-9b3e-f626b10d870b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.623095] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892549, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.625896] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1324.625896] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52263a19-5566-1401-5289-613e1ff387e9" [ 1324.625896] env[68285]: _type = "Task" [ 1324.625896] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.633182] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52263a19-5566-1401-5289-613e1ff387e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.670589] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: f13ad5e7-341f-4475-b334-2144b0923e3b] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1324.911574] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892547, 'name': ReconfigVM_Task, 'duration_secs': 0.817703} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.911836] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Reconfigured VM instance instance-00000076 to attach disk [datastore1] e449ac04-e05c-4134-95b3-4bbc45fa26e4/e449ac04-e05c-4134-95b3-4bbc45fa26e4.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1324.912602] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e3ca311-e6cb-49f4-8fcf-d393d150ad52 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.919218] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1324.919218] env[68285]: value = "task-2892550" [ 1324.919218] env[68285]: _type = "Task" [ 1324.919218] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.927185] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892550, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.998550] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7bfce452-403f-4ba1-87ce-dd7e83db2f2f tempest-ServersTestManualDisk-197628356 tempest-ServersTestManualDisk-197628356-project-member] Lock "51bdaa10-0cf3-4052-9f5c-7d4dad565fd6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.327s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1325.123364] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892549, 'name': PowerOffVM_Task, 'duration_secs': 0.419396} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.123632] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1325.124431] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f18140-a1cc-4ee1-9d35-6670cc9eae3a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.148790] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e1fb1b-f430-4cf9-893d-0ef868c4bf02 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.151735] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52263a19-5566-1401-5289-613e1ff387e9, 'name': SearchDatastore_Task, 'duration_secs': 0.014648} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.152805] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d19a957-ab73-4ce4-8392-8cf3a269d884 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.162666] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1325.162666] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]520ba9a0-6dc3-0a5a-4708-09627964fae3" [ 1325.162666] env[68285]: _type = "Task" [ 1325.162666] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.170677] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520ba9a0-6dc3-0a5a-4708-09627964fae3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.174349] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: d4f20336-9c29-4aac-8c0d-f577749cd7d7] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1325.183562] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1325.183831] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-abbc6a71-299d-44e5-b61c-374ba0fe8035 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.191242] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1325.191242] env[68285]: value = "task-2892551" [ 1325.191242] env[68285]: _type = "Task" [ 1325.191242] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.201491] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1325.201691] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1325.201931] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.202097] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1325.202277] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1325.202543] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory 
with opID=oslo.vmware-16463606-c7c5-47f8-9b30-be8938c69945 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.213894] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1325.214159] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1325.214779] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cba4afd-a688-4895-8d74-08ba6ae5d085 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.221875] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1325.221875] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]528d840d-6ab7-0431-9819-8270f954fea6" [ 1325.221875] env[68285]: _type = "Task" [ 1325.221875] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.229158] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528d840d-6ab7-0431-9819-8270f954fea6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.429396] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892550, 'name': Rename_Task, 'duration_secs': 0.150425} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.429670] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1325.429910] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-298224a1-01e7-4e69-8d9b-ec5e77e5191c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.436335] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1325.436335] env[68285]: value = "task-2892552" [ 1325.436335] env[68285]: _type = "Task" [ 1325.436335] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.443974] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892552, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.676754] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520ba9a0-6dc3-0a5a-4708-09627964fae3, 'name': SearchDatastore_Task, 'duration_secs': 0.021245} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.677368] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1325.677825] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 852ab501-00a6-442b-804a-1bbf49a2be8c/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk. {{(pid=68285) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1325.678318] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: b3b7f551-81aa-4ac4-9906-020fac5f01f7] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1325.680154] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b559ac7d-e128-4642-826f-feb866f6c34c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.689661] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1325.689661] env[68285]: value = "task-2892553" [ 1325.689661] env[68285]: _type = "Task" [ 1325.689661] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.700548] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892553, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.732377] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]528d840d-6ab7-0431-9819-8270f954fea6, 'name': SearchDatastore_Task, 'duration_secs': 0.028027} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.733272] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6ddc461-c8a9-4c25-ae28-151e01d2bf3f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.739500] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1325.739500] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]527e3cc6-0ca3-85db-09f2-151f6c9a1b70" [ 1325.739500] env[68285]: _type = "Task" [ 1325.739500] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.747615] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527e3cc6-0ca3-85db-09f2-151f6c9a1b70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.946769] env[68285]: DEBUG oslo_vmware.api [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892552, 'name': PowerOnVM_Task, 'duration_secs': 0.44618} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.947044] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1325.947254] env[68285]: INFO nova.compute.manager [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Took 7.67 seconds to spawn the instance on the hypervisor. 
[ 1325.947432] env[68285]: DEBUG nova.compute.manager [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1325.948363] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f85f059-db3a-4bad-93c3-99398bb3bf07 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.184661] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 34aeba05-804e-444c-8e58-69c7721b10b1] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1326.199769] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892553, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.251150] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]527e3cc6-0ca3-85db-09f2-151f6c9a1b70, 'name': SearchDatastore_Task, 'duration_secs': 0.014036} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.251439] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1326.251705] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] c341075b-9d30-45db-9d83-f196bf90ecd3/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk. {{(pid=68285) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1326.252032] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae1a29a4-eabf-46cb-aaaf-9f198725b791 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.265445] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1326.265445] env[68285]: value = "task-2892554" [ 1326.265445] env[68285]: _type = "Task" [ 1326.265445] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.274509] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892554, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.466023] env[68285]: INFO nova.compute.manager [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Took 12.52 seconds to build instance. [ 1326.687845] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 5e101d74-7a82-4118-8f4c-7af9a6b0917a] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1326.707307] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892553, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.709777} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.707890] env[68285]: INFO nova.virt.vmwareapi.ds_util [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 852ab501-00a6-442b-804a-1bbf49a2be8c/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk. [ 1326.708599] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a68c98-499a-432b-87e4-9da38bfce1d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.756213] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 852ab501-00a6-442b-804a-1bbf49a2be8c/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1326.757098] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b5a57f7-e25f-4830-bea9-0c890d5b30f0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.783277] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892554, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.785066] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1326.785066] env[68285]: value = "task-2892555" [ 1326.785066] env[68285]: _type = "Task" [ 1326.785066] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.795213] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892555, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.967148] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bac00b7-5581-4055-9955-01be272a0f09 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.028s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1327.192929] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 60144efd-061e-4144-9541-b2321c9b0ec1] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1327.283374] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892554, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55271} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.283655] env[68285]: INFO nova.virt.vmwareapi.ds_util [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] c341075b-9d30-45db-9d83-f196bf90ecd3/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk. [ 1327.284426] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc823dbe-ca0f-43f1-b584-638cbd8e563d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.294279] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892555, 'name': ReconfigVM_Task, 'duration_secs': 0.371539} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.308273] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 852ab501-00a6-442b-804a-1bbf49a2be8c/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1327.316143] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] c341075b-9d30-45db-9d83-f196bf90ecd3/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1327.316859] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10bdea1-a305-4cc1-a6d8-1bde7f287503 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.319293] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1da4ce7-5294-4554-a878-03681b3ac343 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.360172] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2834a7fa-f328-4850-a57b-a4af5afc0f30 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.377275] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1327.377275] env[68285]: value = "task-2892556" [ 1327.377275] env[68285]: _type = "Task" [ 1327.377275] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.385557] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1327.385557] env[68285]: value = "task-2892557" [ 1327.385557] env[68285]: _type = "Task" [ 1327.385557] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.394289] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.403214] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892557, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.696016] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: ec89a2a4-3bfc-45c5-b7f2-239b52995d6b] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1327.878463] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d4076ef4-039f-46b6-8708-ae4a4f92695a tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1327.879204] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d4076ef4-039f-46b6-8708-ae4a4f92695a tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1327.879449] env[68285]: DEBUG nova.compute.manager [None req-d4076ef4-039f-46b6-8708-ae4a4f92695a tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1327.880448] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f179df4-971b-4871-bab3-709621f23280 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.893481] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.893809] env[68285]: DEBUG nova.compute.manager [None req-d4076ef4-039f-46b6-8708-ae4a4f92695a tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68285) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1327.894376] env[68285]: DEBUG nova.objects.instance [None req-d4076ef4-039f-46b6-8708-ae4a4f92695a tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lazy-loading 'flavor' on Instance uuid e449ac04-e05c-4134-95b3-4bbc45fa26e4 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1327.900520] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892557, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.929447] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1327.929714] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1328.198831] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: a97df3d2-c182-46d8-95c2-61caccade285] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1328.390160] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892556, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.399242] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892557, 'name': ReconfigVM_Task, 'duration_secs': 0.609267} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.399484] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1328.399727] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d4f73ba-36ee-4c1d-9af3-6807ad8c7c4f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.406148] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1328.406148] env[68285]: value = "task-2892558" [ 1328.406148] env[68285]: _type = "Task" [ 1328.406148] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.413256] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892558, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.432321] env[68285]: DEBUG nova.compute.utils [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1328.702572] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1328.702796] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Cleaning up deleted instances with incomplete migration {{(pid=68285) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1328.891676] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892556, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.904422] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4076ef4-039f-46b6-8708-ae4a4f92695a tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1328.904673] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30a3635c-2b7f-4b2c-bf09-e20c7bd16fb6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.915542] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892558, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.916789] env[68285]: DEBUG oslo_vmware.api [None req-d4076ef4-039f-46b6-8708-ae4a4f92695a tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1328.916789] env[68285]: value = "task-2892559" [ 1328.916789] env[68285]: _type = "Task" [ 1328.916789] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.924009] env[68285]: DEBUG oslo_vmware.api [None req-d4076ef4-039f-46b6-8708-ae4a4f92695a tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892559, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.934700] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1329.205328] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.391761] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892556, 'name': ReconfigVM_Task, 'duration_secs': 1.72836} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.392071] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Reconfigured VM instance instance-00000075 to attach disk [datastore2] c341075b-9d30-45db-9d83-f196bf90ecd3/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1329.393210] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd576a13-7cd3-4919-94df-24280fb39d64 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.423315] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-069d7938-0521-4b27-b6ae-73fa3effd62d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.442879] env[68285]: DEBUG oslo_vmware.api [None req-d4076ef4-039f-46b6-8708-ae4a4f92695a tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892559, 'name': PowerOffVM_Task, 'duration_secs': 0.188247} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.446455] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4076ef4-039f-46b6-8708-ae4a4f92695a tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1329.446658] env[68285]: DEBUG nova.compute.manager [None req-d4076ef4-039f-46b6-8708-ae4a4f92695a tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1329.446934] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892558, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.447229] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1329.447229] env[68285]: value = "task-2892560" [ 1329.447229] env[68285]: _type = "Task" [ 1329.447229] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.447903] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85360df0-757c-40e4-a7a9-641559bdeb89 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.461209] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892560, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.926663] env[68285]: DEBUG oslo_vmware.api [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892558, 'name': PowerOnVM_Task, 'duration_secs': 1.096187} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.926903] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1329.929685] env[68285]: DEBUG nova.compute.manager [None req-e8b51eee-80ec-4d69-a74f-c4067f0fbb22 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1329.930534] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8db57d-e4d5-422f-967c-583f41cd0e3b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.965905] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892560, 'name': ReconfigVM_Task, 'duration_secs': 0.39282} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.966187] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1329.966453] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73b42ac8-589d-4805-876e-bb04d0f064dd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.968483] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d4076ef4-039f-46b6-8708-ae4a4f92695a tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.089s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1329.975142] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1329.975142] env[68285]: value = "task-2892561" [ 1329.975142] env[68285]: _type = "Task" [ 1329.975142] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.985722] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892561, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.008700] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.008700] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.008700] env[68285]: INFO nova.compute.manager [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Attaching volume 4757d027-11c7-4e4f-88f8-b2c6fcb82574 to /dev/sdb [ 1330.047969] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ba8400-84b0-4e0b-8c82-8c6d89e671da {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.055946] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37884632-281c-4e09-835f-eee9cc0c663a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.071154] env[68285]: DEBUG nova.virt.block_device [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Updating existing volume attachment record: b0be544b-69c7-4480-9966-d821128d4d3b {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1330.196066] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.196208] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.196304] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68285) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1330.486061] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892561, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.637593] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.769188] env[68285]: INFO nova.compute.manager [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Rebuilding instance [ 1330.818140] env[68285]: DEBUG nova.compute.manager [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1330.819000] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02950a1-aa1a-4897-8bc4-577e5c5bc95d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.870808] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.870808] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.870808] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.870808] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.870808] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.872109] env[68285]: INFO nova.compute.manager [None 
req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Terminating instance [ 1330.990864] env[68285]: DEBUG oslo_vmware.api [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892561, 'name': PowerOnVM_Task, 'duration_secs': 0.561381} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.991206] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1330.994643] env[68285]: DEBUG nova.compute.manager [None req-1a795d77-b22c-424e-b4c0-dc519b3f2ec3 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1330.995726] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3b41e8-dcfe-4261-a1ef-84875667c224 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.146246] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Getting list of instances from cluster (obj){ [ 1331.146246] env[68285]: value = "domain-c8" [ 1331.146246] env[68285]: _type = "ClusterComputeResource" [ 1331.146246] env[68285]: } {{(pid=68285) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1331.147888] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c47a31-d61d-410f-b623-ebafc6400a4d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.184562] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Got total of 15 instances {{(pid=68285) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1331.184828] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Triggering sync for uuid d0f6ab86-e18d-42ac-bcf3-94eafb1939ff {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.185131] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Triggering sync for uuid 3858399e-9fc4-4d60-a9d5-95caefb7bd87 {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.185385] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Triggering sync for uuid d1446290-95ce-4e87-85df-7cc69bb57ce7 {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.185634] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Triggering sync for uuid 801f524e-28b5-4452-b880-0fc30d3c5eef {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.185947] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 
None None] Triggering sync for uuid feda1a98-3086-43a6-a887-f4d1602ca8ee {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.186149] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Triggering sync for uuid b2199b56-64bd-4096-b877-e10656b09313 {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.186389] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Triggering sync for uuid 852ab501-00a6-442b-804a-1bbf49a2be8c {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.186626] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Triggering sync for uuid faf810ae-7823-4115-a709-99dc7c480867 {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.186865] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Triggering sync for uuid 0d99fb99-977e-4edc-93d8-492d55fd68a7 {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.187116] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Triggering sync for uuid a1dc8c86-523f-4474-9fea-9ccf35a36b3f {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.187359] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Triggering sync for uuid e3117ede-5d88-4e47-a32f-ea91b1ba83ec {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.187594] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Triggering sync for uuid 8a598506-724f-48f6-91a8-1e02483e6aab {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.187831] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Triggering sync for uuid 9ddeb48e-ef72-4e6e-9058-d45ebde7583e {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.188079] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Triggering sync for uuid c341075b-9d30-45db-9d83-f196bf90ecd3 {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.188320] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Triggering sync for uuid e449ac04-e05c-4134-95b3-4bbc45fa26e4 {{(pid=68285) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1331.188833] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.189184] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.189577] env[68285]: DEBUG oslo_concurrency.lockutils [None 
req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.189874] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.190239] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.190526] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.190866] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "801f524e-28b5-4452-b880-0fc30d3c5eef" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.191158] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "801f524e-28b5-4452-b880-0fc30d3c5eef" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.191499] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "feda1a98-3086-43a6-a887-f4d1602ca8ee" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.191779] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "feda1a98-3086-43a6-a887-f4d1602ca8ee" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.192130] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "b2199b56-64bd-4096-b877-e10656b09313" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.192405] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock 
"b2199b56-64bd-4096-b877-e10656b09313" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.192744] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "852ab501-00a6-442b-804a-1bbf49a2be8c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.193029] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.193367] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "faf810ae-7823-4115-a709-99dc7c480867" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.193638] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "faf810ae-7823-4115-a709-99dc7c480867" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.193971] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.194304] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.194578] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.194834] env[68285]: INFO nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] During sync_power_state the instance has a pending task (rebuilding). Skip. 
[ 1331.195096] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.195381] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.195654] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.196188] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "8a598506-724f-48f6-91a8-1e02483e6aab" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.196474] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "8a598506-724f-48f6-91a8-1e02483e6aab" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.196807] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.197104] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.197438] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "c341075b-9d30-45db-9d83-f196bf90ecd3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.197714] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "c341075b-9d30-45db-9d83-f196bf90ecd3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.197959] env[68285]: INFO nova.compute.manager 
[None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] During sync_power_state the instance has a pending task (rescuing). Skip. [ 1331.198220] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "c341075b-9d30-45db-9d83-f196bf90ecd3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.198503] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.199679] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2e8cbe-7c83-4a46-8919-1e1544989e11 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.203955] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd271d51-a547-43a0-8cb9-3951c3f0f1e5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.208079] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8241b7-591c-4246-ad81-24e9b3b79319 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.211869] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c554b6f-306e-42d3-93fd-978b0454c196 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.215785] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a8f1c2-595a-4ef2-ac87-3700600b6a20 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.219727] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b27cc8-5265-4f10-90b1-0aa46930f26b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.223499] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1914997c-f5d5-479b-9981-a2f58f2ed452 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.228366] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9898cbc4-b28f-44df-9e68-424d966d090a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.232641] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79229d50-ac1a-44df-84f5-7a2fd898fc44 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.236997] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dbdfdfd1-f853-4d36-a4f0-49e2439f04b0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.241193] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd635910-ab47-49ef-ad4e-720242868c41 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.276674] env[68285]: INFO nova.compute.manager [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Unrescuing [ 1331.276974] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.277206] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquired lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1331.277446] env[68285]: DEBUG nova.network.neutron [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1331.293624] env[68285]: WARNING oslo_messaging._drivers.amqpdriver [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1331.298386] env[68285]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1331.376024] env[68285]: DEBUG nova.compute.manager [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1331.376277] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1331.377334] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256080f1-a958-4491-ae07-d125603afb79 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.386396] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1331.386652] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-704d5866-0150-45a1-8db2-b119d7f0af4b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.618037] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1331.618373] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1331.618437] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleting the datastore file [datastore1] e449ac04-e05c-4134-95b3-4bbc45fa26e4 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1331.618765] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-762e6676-d12b-4438-83fd-259807cb3bb6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.626027] env[68285]: DEBUG oslo_vmware.api [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1331.626027] env[68285]: value = "task-2892564" [ 1331.626027] env[68285]: _type = "Task" [ 1331.626027] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.634134] env[68285]: DEBUG oslo_vmware.api [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892564, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.788023] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.598s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.791613] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "801f524e-28b5-4452-b880-0fc30d3c5eef" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.600s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.795170] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.606s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.797178] env[68285]: INFO nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] During sync_power_state the instance has a pending task (unrescuing). Skip. [ 1331.797385] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.604s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.798031] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "8a598506-724f-48f6-91a8-1e02483e6aab" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.601s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.802390] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.605s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.802467] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.607s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.802837] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "faf810ae-7823-4115-a709-99dc7c480867" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.609s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.808922] env[68285]: DEBUG 
oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.618s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.809239] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "b2199b56-64bd-4096-b877-e10656b09313" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.617s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.809700] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "feda1a98-3086-43a6-a887-f4d1602ca8ee" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.618s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.834116] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1331.834540] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4bdffc7b-6d1a-4df5-ae50-c06edf9019f3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.842162] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1331.842162] env[68285]: value = "task-2892565" [ 1331.842162] env[68285]: _type = "Task" [ 1331.842162] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.852238] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892565, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.020288] env[68285]: DEBUG nova.network.neutron [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Updating instance_info_cache with network_info: [{"id": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "address": "fa:16:3e:36:84:f9", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4b82f26-ef", "ovs_interfaceid": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.136772] env[68285]: DEBUG oslo_vmware.api [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892564, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216377} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.137052] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1332.137253] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1332.138025] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1332.138025] env[68285]: INFO nova.compute.manager [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Took 0.76 seconds to destroy the instance on the hypervisor. 
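The "Task: {'id': task-..., 'name': ...} progress is N%" and "... completed successfully" entries in this section come from oslo.vmware polling asynchronous vCenter tasks until they finish. A minimal sketch of that pattern, assuming an already-created oslo_vmware VMwareAPISession and a VM managed-object reference obtained elsewhere (both passed in as parameters, not constructed here):

    def power_on(session, vm_ref):
        # Kick off the asynchronous vCenter task, then block while the session
        # polls it (the DEBUG "progress is N%" lines above), returning the task
        # result on success and raising if the task ends in an error state.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)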
[ 1332.138025] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1332.138025] env[68285]: DEBUG nova.compute.manager [-] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1332.138231] env[68285]: DEBUG nova.network.neutron [-] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1332.354814] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892565, 'name': PowerOffVM_Task, 'duration_secs': 0.215824} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.355117] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1332.355353] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1332.356454] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b59c4b-4498-4eea-960c-529cd8b45c16 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.364394] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1332.364916] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8bc5c1b-f441-4fa8-a4ec-b4343657d821 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.402166] env[68285]: DEBUG nova.compute.manager [req-60fc6afa-dd3d-48aa-87fd-d38a901e8c0f req-286d9edc-fce7-4a08-923d-7613fc35dbba service nova] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Received event network-vif-deleted-75725a79-82bc-49ae-a645-d04ed26d28a7 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1332.402815] env[68285]: INFO nova.compute.manager [req-60fc6afa-dd3d-48aa-87fd-d38a901e8c0f req-286d9edc-fce7-4a08-923d-7613fc35dbba service nova] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Neutron deleted interface 75725a79-82bc-49ae-a645-d04ed26d28a7; detaching it from the instance and deleting it from 
the info cache [ 1332.402815] env[68285]: DEBUG nova.network.neutron [req-60fc6afa-dd3d-48aa-87fd-d38a901e8c0f req-286d9edc-fce7-4a08-923d-7613fc35dbba service nova] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.451116] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1332.451369] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1332.451568] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleting the datastore file [datastore1] a1dc8c86-523f-4474-9fea-9ccf35a36b3f {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1332.451860] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0adcb476-cbeb-4c2d-a2e9-b8d695ec23c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.458926] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1332.458926] env[68285]: value = "task-2892568" [ 1332.458926] env[68285]: _type = "Task" [ 1332.458926] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.472199] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892568, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.523131] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Releasing lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1332.523804] env[68285]: DEBUG nova.objects.instance [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lazy-loading 'flavor' on Instance uuid 852ab501-00a6-442b-804a-1bbf49a2be8c {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1333.379443] env[68285]: DEBUG nova.network.neutron [-] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.387018] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2b8377cb-fd09-4fb1-aac8-09c2f7c6a3bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.395437] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "6f6037bf-5527-4391-857b-47bc68fb04fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1333.395844] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "6f6037bf-5527-4391-857b-47bc68fb04fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1333.398250] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3db498-98a5-472d-86ec-86bf81915608 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.412588] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892568, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195462} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.447593] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1333.447984] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1333.448320] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1333.453148] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1333.458521] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52304e17-939a-4c4f-bdeb-0afba1d81dd9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.477403] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3350222e-f3f3-4b75-bc3e-936f29d4751b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.487129] env[68285]: DEBUG oslo_vmware.api [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1333.487129] env[68285]: value = "task-2892569" [ 1333.487129] env[68285]: _type = "Task" [ 1333.487129] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.515959] env[68285]: DEBUG nova.compute.manager [req-60fc6afa-dd3d-48aa-87fd-d38a901e8c0f req-286d9edc-fce7-4a08-923d-7613fc35dbba service nova] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Detach interface failed, port_id=75725a79-82bc-49ae-a645-d04ed26d28a7, reason: Instance e449ac04-e05c-4134-95b3-4bbc45fa26e4 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1333.522464] env[68285]: DEBUG oslo_vmware.api [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892569, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.887276] env[68285]: INFO nova.compute.manager [-] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Took 1.75 seconds to deallocate network for instance. 
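Entries earlier in this section such as "Running periodic task ComputeManager._sync_power_states" and "Running periodic task ComputeManager._reclaim_queued_deletes" are driven by oslo.service's periodic-task machinery. A minimal sketch of how such a task is declared; the class name, spacing value, and body are illustrative and not taken from Nova's actual configuration:

    from oslo_service import periodic_task

    class ExampleManager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=60)
        def _sync_power_states(self, context):
            # Invoked by the periodic-task runner roughly every `spacing`
            # seconds; in Nova this reconciles the driver's power state with
            # the database record, producing the sync entries seen above.
            pass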
[ 1333.903549] env[68285]: DEBUG nova.compute.manager [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1333.997964] env[68285]: DEBUG oslo_vmware.api [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892569, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.395313] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.395652] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.395808] env[68285]: DEBUG nova.objects.instance [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lazy-loading 'resources' on Instance uuid e449ac04-e05c-4134-95b3-4bbc45fa26e4 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1334.423517] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.487746] env[68285]: DEBUG nova.virt.hardware [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1334.489033] env[68285]: DEBUG nova.virt.hardware [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 
tempest-ServerActionsTestJSON-223452269-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1334.489033] env[68285]: DEBUG nova.virt.hardware [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1334.489033] env[68285]: DEBUG nova.virt.hardware [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1334.489033] env[68285]: DEBUG nova.virt.hardware [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1334.489033] env[68285]: DEBUG nova.virt.hardware [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1334.489033] env[68285]: DEBUG nova.virt.hardware [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1334.489302] env[68285]: DEBUG nova.virt.hardware [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1334.489302] env[68285]: DEBUG nova.virt.hardware [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1334.489794] env[68285]: DEBUG nova.virt.hardware [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1334.489794] env[68285]: DEBUG nova.virt.hardware [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1334.490498] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0db102-33fc-40f4-8f92-06341176169e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.501395] env[68285]: DEBUG oslo_vmware.api [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f 
tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892569, 'name': PowerOffVM_Task, 'duration_secs': 0.568948} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.502750] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1334.507757] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Reconfiguring VM instance instance-0000006c to detach disk 2002 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1334.508051] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a1aa62e-4a83-48ab-a6fc-9bbdba82bfd2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.521400] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fa19d4-64c8-490c-bfcb-d8dc01923867 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.534881] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:68:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07d808df-d1b1-42f4-8853-e537f5b160e0', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1334.541884] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1334.543050] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1334.543356] env[68285]: DEBUG oslo_vmware.api [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1334.543356] env[68285]: value = "task-2892570" [ 1334.543356] env[68285]: _type = "Task" [ 1334.543356] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.543537] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ad4a441-4cc7-41be-9c9f-0a623b0b3242 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.566506] env[68285]: DEBUG oslo_vmware.api [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892570, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.567595] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1334.567595] env[68285]: value = "task-2892571" [ 1334.567595] env[68285]: _type = "Task" [ 1334.567595] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.575142] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892571, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.622912] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Volume attach. Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1334.623298] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581091', 'volume_id': '4757d027-11c7-4e4f-88f8-b2c6fcb82574', 'name': 'volume-4757d027-11c7-4e4f-88f8-b2c6fcb82574', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0d99fb99-977e-4edc-93d8-492d55fd68a7', 'attached_at': '', 'detached_at': '', 'volume_id': '4757d027-11c7-4e4f-88f8-b2c6fcb82574', 'serial': '4757d027-11c7-4e4f-88f8-b2c6fcb82574'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1334.624409] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa140cd6-5a1a-42ba-9ebd-52a123a8d6c0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.640196] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b9e612-dd49-4b21-9abe-bc693062f5e1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.665581] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] volume-4757d027-11c7-4e4f-88f8-b2c6fcb82574/volume-4757d027-11c7-4e4f-88f8-b2c6fcb82574.vmdk or device None with type thin 
{{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1334.666167] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77b0aed3-4203-42d1-aea0-fe5bb1d830e4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.684276] env[68285]: DEBUG oslo_vmware.api [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1334.684276] env[68285]: value = "task-2892572" [ 1334.684276] env[68285]: _type = "Task" [ 1334.684276] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.691843] env[68285]: DEBUG oslo_vmware.api [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892572, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.069287] env[68285]: DEBUG oslo_vmware.api [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892570, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.076911] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892571, 'name': CreateVM_Task, 'duration_secs': 0.299315} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.079048] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1335.079850] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.079986] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1335.080323] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1335.080594] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e339f3f-cac9-40c5-ab8d-d29326cf4715 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.084669] env[68285]: 
DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1335.084669] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5240d20a-bb04-3c3e-2e02-3e5f553f84a2" [ 1335.084669] env[68285]: _type = "Task" [ 1335.084669] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.089263] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be714dfd-0e77-4ff3-aca4-9f4012e8add3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.094579] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5240d20a-bb04-3c3e-2e02-3e5f553f84a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.098493] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c061bb-0d1d-432a-8236-13f3af8e4d90 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.127396] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92379ea7-00c5-434a-b340-9bb7b3d83418 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.133970] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bbedbc1-1a42-48cd-b824-c3d838e48130 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.147506] env[68285]: DEBUG nova.compute.provider_tree [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1335.193739] env[68285]: DEBUG oslo_vmware.api [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892572, 'name': ReconfigVM_Task, 'duration_secs': 0.370319} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.193961] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Reconfigured VM instance instance-0000006f to attach disk [datastore2] volume-4757d027-11c7-4e4f-88f8-b2c6fcb82574/volume-4757d027-11c7-4e4f-88f8-b2c6fcb82574.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1335.198404] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40d688fb-5429-47e8-bf12-25883a676508 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.212454] env[68285]: DEBUG oslo_vmware.api [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1335.212454] env[68285]: value = "task-2892573" [ 1335.212454] env[68285]: _type = "Task" [ 1335.212454] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.221040] env[68285]: DEBUG oslo_vmware.api [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892573, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.568876] env[68285]: DEBUG oslo_vmware.api [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892570, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.593521] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5240d20a-bb04-3c3e-2e02-3e5f553f84a2, 'name': SearchDatastore_Task, 'duration_secs': 0.012547} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.593833] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1335.594075] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1335.594305] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.594447] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1335.594619] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1335.594864] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3fde0975-1dc2-46b6-ae20-0e87510b1f70 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.604762] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1335.604991] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1335.605667] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be4c0460-8921-4684-9202-b20a4280e314 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.610343] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1335.610343] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5223bf27-b17f-4bcd-74ca-8fde9e0c8658" [ 1335.610343] env[68285]: _type = "Task" [ 1335.610343] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.617601] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5223bf27-b17f-4bcd-74ca-8fde9e0c8658, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.650811] env[68285]: DEBUG nova.scheduler.client.report [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1335.721841] env[68285]: DEBUG oslo_vmware.api [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892573, 'name': ReconfigVM_Task, 'duration_secs': 0.133083} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.722173] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581091', 'volume_id': '4757d027-11c7-4e4f-88f8-b2c6fcb82574', 'name': 'volume-4757d027-11c7-4e4f-88f8-b2c6fcb82574', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0d99fb99-977e-4edc-93d8-492d55fd68a7', 'attached_at': '', 'detached_at': '', 'volume_id': '4757d027-11c7-4e4f-88f8-b2c6fcb82574', 'serial': '4757d027-11c7-4e4f-88f8-b2c6fcb82574'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1336.069178] env[68285]: DEBUG oslo_vmware.api [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892570, 'name': ReconfigVM_Task, 'duration_secs': 1.24256} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.069443] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Reconfigured VM instance instance-0000006c to detach disk 2002 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1336.069628] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1336.069867] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b2619b4-e8fd-4413-8562-20043237ef2b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.076226] env[68285]: DEBUG oslo_vmware.api [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1336.076226] env[68285]: value = "task-2892574" [ 1336.076226] env[68285]: _type = "Task" [ 1336.076226] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.083236] env[68285]: DEBUG oslo_vmware.api [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892574, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.120541] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5223bf27-b17f-4bcd-74ca-8fde9e0c8658, 'name': SearchDatastore_Task, 'duration_secs': 0.016454} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.121246] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1d9c375-08d1-4220-91c3-0568fd2bc3a2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.125873] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1336.125873] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52197099-ec5f-8c22-473a-0400cc7fdba0" [ 1336.125873] env[68285]: _type = "Task" [ 1336.125873] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.132984] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52197099-ec5f-8c22-473a-0400cc7fdba0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.155865] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.760s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.157916] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.735s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1336.159340] env[68285]: INFO nova.compute.claims [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1336.205412] env[68285]: INFO nova.scheduler.client.report [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleted allocations for instance e449ac04-e05c-4134-95b3-4bbc45fa26e4 [ 1336.586652] env[68285]: DEBUG oslo_vmware.api [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892574, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.636018] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52197099-ec5f-8c22-473a-0400cc7fdba0, 'name': SearchDatastore_Task, 'duration_secs': 0.032022} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.636348] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1336.636650] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] a1dc8c86-523f-4474-9fea-9ccf35a36b3f/a1dc8c86-523f-4474-9fea-9ccf35a36b3f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1336.636927] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3d5d014-a963-47d3-addc-b3d49611771b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.643813] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1336.643813] env[68285]: value = "task-2892575" [ 1336.643813] env[68285]: _type = "Task" [ 1336.643813] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.652018] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892575, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.712532] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cc25ec8d-233e-4e1a-baed-0ede06f62e45 tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.843s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.713628] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.515s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1336.713835] env[68285]: INFO nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] During sync_power_state the instance has a pending task (deleting). Skip. [ 1336.714060] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "e449ac04-e05c-4134-95b3-4bbc45fa26e4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.761302] env[68285]: DEBUG nova.objects.instance [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lazy-loading 'flavor' on Instance uuid 0d99fb99-977e-4edc-93d8-492d55fd68a7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1337.086196] env[68285]: DEBUG oslo_vmware.api [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892574, 'name': PowerOnVM_Task, 'duration_secs': 0.668673} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.086489] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1337.086740] env[68285]: DEBUG nova.compute.manager [None req-7e4851a1-d390-4c9d-b344-cbe22d8f9d2f tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1337.087677] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa023ad9-c993-4f2c-bede-24119c36b6de {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.154589] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892575, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.268919] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0bf698a0-4dd8-4ebe-a89d-1402503d185c tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.262s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.270151] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.076s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.271331] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3a46ea-94ef-49ec-aaa2-5bdd0f30a622 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.368264] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b09a3d-20dc-4779-be7b-87bb0815d488 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.377199] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1fdae1-99d2-4ad5-ad21-91cb61162381 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.409739] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a702b7c-b955-4162-8958-539ef2aeee7b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.418037] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6b8928a5-b13b-47b8-998a-b121575d0aac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.431382] env[68285]: DEBUG nova.compute.provider_tree [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1337.474106] env[68285]: DEBUG oslo_concurrency.lockutils [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1337.656325] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892575, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.618655} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.656698] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] a1dc8c86-523f-4474-9fea-9ccf35a36b3f/a1dc8c86-523f-4474-9fea-9ccf35a36b3f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1337.656774] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1337.657043] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-def23284-6e6d-4f5a-b289-a607e3a6ccfd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.663923] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1337.663923] env[68285]: value = "task-2892576" [ 1337.663923] env[68285]: _type = "Task" [ 1337.663923] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.672226] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892576, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.782697] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.512s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.783010] env[68285]: DEBUG oslo_concurrency.lockutils [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.309s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.800486] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1337.800821] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.801102] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1337.801396] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.801647] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.803972] env[68285]: INFO nova.compute.manager [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Terminating instance [ 1337.934474] env[68285]: DEBUG nova.scheduler.client.report [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 
tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1337.994059] env[68285]: DEBUG nova.compute.manager [req-dda61be0-5e14-492c-9fd3-586cddec785f req-d84c6911-e5a2-4780-a86c-007515199e19 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Received event network-changed-f4b82f26-eff6-4869-af1c-0bc1a3a4d606 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1337.994059] env[68285]: DEBUG nova.compute.manager [req-dda61be0-5e14-492c-9fd3-586cddec785f req-d84c6911-e5a2-4780-a86c-007515199e19 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Refreshing instance network info cache due to event network-changed-f4b82f26-eff6-4869-af1c-0bc1a3a4d606. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1337.996949] env[68285]: DEBUG oslo_concurrency.lockutils [req-dda61be0-5e14-492c-9fd3-586cddec785f req-d84c6911-e5a2-4780-a86c-007515199e19 service nova] Acquiring lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.997160] env[68285]: DEBUG oslo_concurrency.lockutils [req-dda61be0-5e14-492c-9fd3-586cddec785f req-d84c6911-e5a2-4780-a86c-007515199e19 service nova] Acquired lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1337.997375] env[68285]: DEBUG nova.network.neutron [req-dda61be0-5e14-492c-9fd3-586cddec785f req-d84c6911-e5a2-4780-a86c-007515199e19 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Refreshing network info cache for port f4b82f26-eff6-4869-af1c-0bc1a3a4d606 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1338.173862] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892576, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086338} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.174099] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1338.174864] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfe62f4-c951-4a48-b8c1-df66ff16484c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.196186] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] a1dc8c86-523f-4474-9fea-9ccf35a36b3f/a1dc8c86-523f-4474-9fea-9ccf35a36b3f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1338.196575] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af7a05de-ee79-43c2-b697-da8ede34df80 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.216357] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1338.216357] env[68285]: value = "task-2892577" [ 1338.216357] env[68285]: _type = "Task" [ 1338.216357] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.223877] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892577, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.285913] env[68285]: INFO nova.compute.manager [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Detaching volume 4757d027-11c7-4e4f-88f8-b2c6fcb82574 [ 1338.308286] env[68285]: DEBUG nova.compute.manager [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1338.308518] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1338.309445] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8befcb3-05bc-4e4a-9349-dde1c5ba0ab3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.317173] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1338.317436] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eca3dd07-d3be-4973-94ec-c4299cd22dd7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.323101] env[68285]: DEBUG oslo_vmware.api [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1338.323101] env[68285]: value = "task-2892578" [ 1338.323101] env[68285]: _type = "Task" [ 1338.323101] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.324041] env[68285]: INFO nova.virt.block_device [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Attempting to driver detach volume 4757d027-11c7-4e4f-88f8-b2c6fcb82574 from mountpoint /dev/sdb [ 1338.324261] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Volume detach. 
Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1338.324464] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581091', 'volume_id': '4757d027-11c7-4e4f-88f8-b2c6fcb82574', 'name': 'volume-4757d027-11c7-4e4f-88f8-b2c6fcb82574', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0d99fb99-977e-4edc-93d8-492d55fd68a7', 'attached_at': '', 'detached_at': '', 'volume_id': '4757d027-11c7-4e4f-88f8-b2c6fcb82574', 'serial': '4757d027-11c7-4e4f-88f8-b2c6fcb82574'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1338.325267] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90129fe2-c116-4d7e-8fff-c8de45cc0bb6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.355959] env[68285]: DEBUG oslo_vmware.api [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892578, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.357126] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381274f1-1861-4464-a192-1b3a4d102334 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.364318] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc88a07-d668-4efe-af3b-86a43349210a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.385900] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff2bef8-db08-414e-9046-a66de7df2771 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.404538] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] The volume has not been displaced from its original location: [datastore2] volume-4757d027-11c7-4e4f-88f8-b2c6fcb82574/volume-4757d027-11c7-4e4f-88f8-b2c6fcb82574.vmdk. No consolidation needed. 
{{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1338.409964] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Reconfiguring VM instance instance-0000006f to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1338.410318] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b7ea719-6680-4dc6-9061-8180dd5890a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.429354] env[68285]: DEBUG oslo_vmware.api [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1338.429354] env[68285]: value = "task-2892579" [ 1338.429354] env[68285]: _type = "Task" [ 1338.429354] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.437953] env[68285]: DEBUG oslo_vmware.api [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892579, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.438732] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.281s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1338.439244] env[68285]: DEBUG nova.compute.manager [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1338.716878] env[68285]: DEBUG nova.network.neutron [req-dda61be0-5e14-492c-9fd3-586cddec785f req-d84c6911-e5a2-4780-a86c-007515199e19 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Updated VIF entry in instance network info cache for port f4b82f26-eff6-4869-af1c-0bc1a3a4d606. 
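
The compute_resources acquire/release lines earlier on this line come from oslo.concurrency's synchronized decorator, which is how resource-tracker claims are serialized per host. A stand-alone sketch of that pattern with illustrative names (this is not the ResourceTracker code itself):

    from oslo_concurrency import lockutils

    # Nova defines a similar prefixed helper for its lock files.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Runs with the "compute_resources" lock held; the acquired/released
        # debug lines in the log are emitted by the decorator's wrapper
        # around calls like this one.
        print('claiming resources for', instance_uuid)

    instance_claim('6f6037bf-5527-4391-857b-47bc68fb04fc')
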
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1338.717265] env[68285]: DEBUG nova.network.neutron [req-dda61be0-5e14-492c-9fd3-586cddec785f req-d84c6911-e5a2-4780-a86c-007515199e19 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Updating instance_info_cache with network_info: [{"id": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "address": "fa:16:3e:36:84:f9", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4b82f26-ef", "ovs_interfaceid": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1338.726894] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892577, 'name': ReconfigVM_Task, 'duration_secs': 0.283316} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.727768] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Reconfigured VM instance instance-00000070 to attach disk [datastore1] a1dc8c86-523f-4474-9fea-9ccf35a36b3f/a1dc8c86-523f-4474-9fea-9ccf35a36b3f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1338.728364] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-067ce751-2b25-4414-b502-56e0695b8cfb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.734564] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1338.734564] env[68285]: value = "task-2892580" [ 1338.734564] env[68285]: _type = "Task" [ 1338.734564] env[68285]: } to complete. 
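
The instance_info_cache payload logged above is a list of VIF dicts, so pulling addresses out of it is plain dict traversal. A sketch against a trimmed copy of the entry for port f4b82f26-eff6-4869-af1c-0bc1a3a4d606:

    network_info = [{
        "id": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606",
        "address": "fa:16:3e:36:84:f9",
        "network": {"subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.12", "type": "fixed",
                     "floating_ips": [{"address": "10.180.180.185",
                                       "type": "floating"}]}],
        }]},
    }]

    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floating = [f["address"] for f in ip.get("floating_ips", [])]
                print(vif["id"], ip["address"], floating)
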
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.741772] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892580, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.836300] env[68285]: DEBUG oslo_vmware.api [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892578, 'name': PowerOffVM_Task, 'duration_secs': 0.222339} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.892341] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1338.892341] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1338.892341] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05d5ba67-e376-4f26-af8d-630f63172230 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.915132] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1338.915419] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1338.915656] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleting the datastore file [datastore2] 3858399e-9fc4-4d60-a9d5-95caefb7bd87 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1338.915906] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3ebab4f4-ae41-40c6-9abd-05eb5f1b93d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.922544] env[68285]: DEBUG oslo_vmware.api [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for the task: (returnval){ [ 1338.922544] env[68285]: value = "task-2892582" [ 1338.922544] env[68285]: _type = "Task" [ 1338.922544] env[68285]: } to complete. 
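
The destroy sequence traced above (power off, unregister, delete the instance directory from the datastore) is driven through oslo.vmware's invoke-and-poll pattern. A hedged sketch of the same call shape; the vCenter address, credentials and managed-object values below are placeholders, not taken from this log:

    from oslo_vmware import api, vim_util

    # Placeholder endpoint/credentials; the poll interval is the knob behind
    # the repeated "progress is N%" records.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')      # placeholder
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')      # placeholder

    # Power off, unregister (no task returned), then delete the directory,
    # waiting on each vSphere task as the log does.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name='[datastore2] 3858399e-9fc4-4d60-a9d5-95caefb7bd87',
                              datacenter=dc_ref)
    session.wait_for_task(task)
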
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.931257] env[68285]: DEBUG oslo_vmware.api [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892582, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.939138] env[68285]: DEBUG oslo_vmware.api [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892579, 'name': ReconfigVM_Task, 'duration_secs': 0.229432} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.939383] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Reconfigured VM instance instance-0000006f to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1338.946288] env[68285]: DEBUG nova.compute.utils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1338.947644] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dca5827b-1292-4bc9-8ed9-3b3f2eab27d4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.958248] env[68285]: DEBUG nova.compute.manager [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1338.958451] env[68285]: DEBUG nova.network.neutron [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1338.966543] env[68285]: DEBUG oslo_vmware.api [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1338.966543] env[68285]: value = "task-2892583" [ 1338.966543] env[68285]: _type = "Task" [ 1338.966543] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.975019] env[68285]: DEBUG oslo_vmware.api [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892583, 'name': ReconfigVM_Task} progress is 5%. 
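
The "Using /dev/sd instead of None" record above is the device-naming helper falling back to a default prefix before picking the next free name for the block device mapping. A rough stand-alone approximation (not Nova's get_next_device_name):

    import string

    def next_device_name(used, prefix='/dev/sd'):
        # Walk /dev/sda, /dev/sdb, ... and return the first name not yet
        # attached to the instance.
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in used:
                return candidate
        raise ValueError('no free device names left')

    print(next_device_name({'/dev/sda', '/dev/sdb'}))   # -> /dev/sdc
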
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.997494] env[68285]: DEBUG nova.policy [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5ee32979c0f43a2871e145e459e4240', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f732a9946001482bb76dee4e2cf844c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1339.223243] env[68285]: DEBUG oslo_concurrency.lockutils [req-dda61be0-5e14-492c-9fd3-586cddec785f req-d84c6911-e5a2-4780-a86c-007515199e19 service nova] Releasing lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1339.244638] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892580, 'name': Rename_Task, 'duration_secs': 0.154257} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.244922] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1339.245178] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d30114d-96d9-4d3c-877b-fda82edd9db1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.251777] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1339.251777] env[68285]: value = "task-2892584" [ 1339.251777] env[68285]: _type = "Task" [ 1339.251777] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.259190] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892584, 'name': PowerOnVM_Task} progress is 0%. 
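
The nova.policy record above is an oslo.policy authorization check that evaluated to False for this member/reader token. A minimal sketch of the same call shape; the enforcer setup, rule string and target are illustrative, and only the rule name and credential fields are taken from the log:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    # Illustrative default: only admins may attach external networks.
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'role:admin'))

    creds = {'roles': ['member', 'reader'],
             'project_id': 'f732a9946001482bb76dee4e2cf844c2'}
    allowed = enforcer.enforce('network:attach_external_network',
                               target={}, creds=creds)
    print(allowed)   # False, matching the "failed with credentials" record
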
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.269771] env[68285]: DEBUG nova.network.neutron [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Successfully created port: 57bdb510-a168-422a-93f0-3e2db2eb694d {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1339.436024] env[68285]: DEBUG oslo_vmware.api [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Task: {'id': task-2892582, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188929} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.436024] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1339.436024] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1339.436024] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1339.436024] env[68285]: INFO nova.compute.manager [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1339.436024] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1339.436024] env[68285]: DEBUG nova.compute.manager [-] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1339.436024] env[68285]: DEBUG nova.network.neutron [-] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1339.458798] env[68285]: DEBUG nova.compute.manager [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Start building block device mappings for instance. 
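
The "Waiting for function ... _deallocate_network_with_retries to return" record above is emitted by the retry decorator in oslo.service's loopingcall module while it drives the wrapped call. A self-contained sketch of that decorator; the retry counts and exception type here are made up for illustration:

    from oslo_service import loopingcall

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=10, exceptions=(IOError,))
    def deallocate_network_with_retries():
        # Re-invoked (with increasing sleeps) whenever it raises IOError,
        # up to max_retry_count attempts; other exceptions propagate.
        print('deallocating network')

    deallocate_network_with_retries()
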
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1339.477525] env[68285]: DEBUG oslo_vmware.api [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892583, 'name': ReconfigVM_Task, 'duration_secs': 0.142584} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.477835] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581091', 'volume_id': '4757d027-11c7-4e4f-88f8-b2c6fcb82574', 'name': 'volume-4757d027-11c7-4e4f-88f8-b2c6fcb82574', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0d99fb99-977e-4edc-93d8-492d55fd68a7', 'attached_at': '', 'detached_at': '', 'volume_id': '4757d027-11c7-4e4f-88f8-b2c6fcb82574', 'serial': '4757d027-11c7-4e4f-88f8-b2c6fcb82574'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1339.759283] env[68285]: DEBUG nova.compute.manager [req-cb1dfac8-8c49-4cea-9932-4f396ac619df req-85e9438d-afad-4aec-ba0c-1a5b57cdf271 service nova] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Received event network-vif-deleted-b1736f52-bada-4b08-820b-ac312cd00b5b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1339.759535] env[68285]: INFO nova.compute.manager [req-cb1dfac8-8c49-4cea-9932-4f396ac619df req-85e9438d-afad-4aec-ba0c-1a5b57cdf271 service nova] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Neutron deleted interface b1736f52-bada-4b08-820b-ac312cd00b5b; detaching it from the instance and deleting it from the info cache [ 1339.759637] env[68285]: DEBUG nova.network.neutron [req-cb1dfac8-8c49-4cea-9932-4f396ac619df req-85e9438d-afad-4aec-ba0c-1a5b57cdf271 service nova] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.764261] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892584, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.019018] env[68285]: DEBUG nova.compute.manager [req-2cab3a57-14f7-44f1-b7db-9290b25fc28f req-8f1f19ab-e667-4963-9a88-79c5000d7324 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Received event network-changed-f4b82f26-eff6-4869-af1c-0bc1a3a4d606 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1340.019278] env[68285]: DEBUG nova.compute.manager [req-2cab3a57-14f7-44f1-b7db-9290b25fc28f req-8f1f19ab-e667-4963-9a88-79c5000d7324 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Refreshing instance network info cache due to event network-changed-f4b82f26-eff6-4869-af1c-0bc1a3a4d606. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1340.019476] env[68285]: DEBUG oslo_concurrency.lockutils [req-2cab3a57-14f7-44f1-b7db-9290b25fc28f req-8f1f19ab-e667-4963-9a88-79c5000d7324 service nova] Acquiring lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.019631] env[68285]: DEBUG oslo_concurrency.lockutils [req-2cab3a57-14f7-44f1-b7db-9290b25fc28f req-8f1f19ab-e667-4963-9a88-79c5000d7324 service nova] Acquired lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1340.019777] env[68285]: DEBUG nova.network.neutron [req-2cab3a57-14f7-44f1-b7db-9290b25fc28f req-8f1f19ab-e667-4963-9a88-79c5000d7324 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Refreshing network info cache for port f4b82f26-eff6-4869-af1c-0bc1a3a4d606 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1340.045328] env[68285]: DEBUG nova.objects.instance [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lazy-loading 'flavor' on Instance uuid 0d99fb99-977e-4edc-93d8-492d55fd68a7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1340.239349] env[68285]: DEBUG nova.network.neutron [-] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.261460] env[68285]: DEBUG oslo_vmware.api [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892584, 'name': PowerOnVM_Task, 'duration_secs': 0.551652} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.261800] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1340.262032] env[68285]: DEBUG nova.compute.manager [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1340.262791] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca4c011-25a4-4ec1-b68c-697a00668e66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.265512] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cadb4837-e870-4f48-91a2-9d9a0c465c3f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.277111] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796a4527-92ff-46f2-a512-f72b80fb833e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.312452] env[68285]: DEBUG nova.compute.manager [req-cb1dfac8-8c49-4cea-9932-4f396ac619df req-85e9438d-afad-4aec-ba0c-1a5b57cdf271 service nova] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Detach interface failed, port_id=b1736f52-bada-4b08-820b-ac312cd00b5b, reason: Instance 3858399e-9fc4-4d60-a9d5-95caefb7bd87 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1340.468360] env[68285]: DEBUG nova.compute.manager [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1340.495408] env[68285]: DEBUG nova.virt.hardware [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1340.495801] env[68285]: DEBUG nova.virt.hardware [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1340.495993] env[68285]: DEBUG nova.virt.hardware [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1340.496205] env[68285]: DEBUG nova.virt.hardware [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1340.496351] env[68285]: DEBUG nova.virt.hardware [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1340.496495] env[68285]: DEBUG nova.virt.hardware [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1340.496697] env[68285]: DEBUG nova.virt.hardware [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1340.496852] env[68285]: DEBUG nova.virt.hardware [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1340.497024] env[68285]: DEBUG nova.virt.hardware [None 
req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1340.497189] env[68285]: DEBUG nova.virt.hardware [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1340.497357] env[68285]: DEBUG nova.virt.hardware [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1340.498220] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c0ab9b-05dd-4144-9cf0-3564afd519af {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.506372] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee2a9e5-e3c5-4079-9e77-b58fb39711c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.741997] env[68285]: INFO nova.compute.manager [-] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Took 1.31 seconds to deallocate network for instance. [ 1340.784643] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1340.784941] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1340.787627] env[68285]: DEBUG nova.objects.instance [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1340.838805] env[68285]: DEBUG nova.network.neutron [req-2cab3a57-14f7-44f1-b7db-9290b25fc28f req-8f1f19ab-e667-4963-9a88-79c5000d7324 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Updated VIF entry in instance network info cache for port f4b82f26-eff6-4869-af1c-0bc1a3a4d606. 
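
The nova.virt.hardware records above (continuing from the previous line) enumerate viable CPU topologies: with no flavor or image limits the maxima default to 65536, and for a single vCPU the only combination whose sockets x cores x threads equals the vCPU count is 1:1:1. A rough re-creation of that enumeration, not Nova's exact implementation:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # No dimension can exceed the vCPU count, so cap the search there.
        for sockets in range(1, min(max_sockets, vcpus) + 1):
            for cores in range(1, min(max_cores, vcpus) + 1):
                for threads in range(1, min(max_threads, vcpus) + 1):
                    if sockets * cores * threads == vcpus:
                        yield (sockets, cores, threads)

    print(list(possible_topologies(1)))   # -> [(1, 1, 1)], as logged above
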
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1340.838805] env[68285]: DEBUG nova.network.neutron [req-2cab3a57-14f7-44f1-b7db-9290b25fc28f req-8f1f19ab-e667-4963-9a88-79c5000d7324 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Updating instance_info_cache with network_info: [{"id": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "address": "fa:16:3e:36:84:f9", "network": {"id": "f3d70176-6ccb-4898-8699-1363718ad40f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1022320100-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0142f80018fe4d41830f10307dd482f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a15de394-0367-4921-a5c1-6ac8615e3283", "external-id": "nsx-vlan-transportzone-13", "segmentation_id": 13, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4b82f26-ef", "ovs_interfaceid": "f4b82f26-eff6-4869-af1c-0bc1a3a4d606", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.932067] env[68285]: DEBUG nova.network.neutron [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Successfully updated port: 57bdb510-a168-422a-93f0-3e2db2eb694d {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1341.055796] env[68285]: DEBUG oslo_concurrency.lockutils [None req-62a68ee9-35ba-4674-bca0-c13b644c982b tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.273s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1341.248462] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.340071] env[68285]: DEBUG oslo_concurrency.lockutils [req-2cab3a57-14f7-44f1-b7db-9290b25fc28f req-8f1f19ab-e667-4963-9a88-79c5000d7324 service nova] Releasing lock "refresh_cache-852ab501-00a6-442b-804a-1bbf49a2be8c" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1341.434215] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "refresh_cache-6f6037bf-5527-4391-857b-47bc68fb04fc" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.434434] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquired lock "refresh_cache-6f6037bf-5527-4391-857b-47bc68fb04fc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1341.434522] env[68285]: DEBUG nova.network.neutron [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1341.726243] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.726534] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.726759] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "0d99fb99-977e-4edc-93d8-492d55fd68a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.726944] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.727129] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1341.729292] env[68285]: INFO nova.compute.manager [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Terminating instance [ 1341.787066] env[68285]: DEBUG nova.compute.manager [req-45fa1f72-7efb-49ea-81cb-4a2f0efa7c05 
req-93f0bd2c-0540-44fe-b63a-6c23449dc966 service nova] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Received event network-vif-plugged-57bdb510-a168-422a-93f0-3e2db2eb694d {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1341.787329] env[68285]: DEBUG oslo_concurrency.lockutils [req-45fa1f72-7efb-49ea-81cb-4a2f0efa7c05 req-93f0bd2c-0540-44fe-b63a-6c23449dc966 service nova] Acquiring lock "6f6037bf-5527-4391-857b-47bc68fb04fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.787484] env[68285]: DEBUG oslo_concurrency.lockutils [req-45fa1f72-7efb-49ea-81cb-4a2f0efa7c05 req-93f0bd2c-0540-44fe-b63a-6c23449dc966 service nova] Lock "6f6037bf-5527-4391-857b-47bc68fb04fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.787647] env[68285]: DEBUG oslo_concurrency.lockutils [req-45fa1f72-7efb-49ea-81cb-4a2f0efa7c05 req-93f0bd2c-0540-44fe-b63a-6c23449dc966 service nova] Lock "6f6037bf-5527-4391-857b-47bc68fb04fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1341.787813] env[68285]: DEBUG nova.compute.manager [req-45fa1f72-7efb-49ea-81cb-4a2f0efa7c05 req-93f0bd2c-0540-44fe-b63a-6c23449dc966 service nova] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] No waiting events found dispatching network-vif-plugged-57bdb510-a168-422a-93f0-3e2db2eb694d {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1341.787978] env[68285]: WARNING nova.compute.manager [req-45fa1f72-7efb-49ea-81cb-4a2f0efa7c05 req-93f0bd2c-0540-44fe-b63a-6c23449dc966 service nova] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Received unexpected event network-vif-plugged-57bdb510-a168-422a-93f0-3e2db2eb694d for instance with vm_state building and task_state spawning. [ 1341.788158] env[68285]: DEBUG nova.compute.manager [req-45fa1f72-7efb-49ea-81cb-4a2f0efa7c05 req-93f0bd2c-0540-44fe-b63a-6c23449dc966 service nova] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Received event network-changed-57bdb510-a168-422a-93f0-3e2db2eb694d {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1341.788308] env[68285]: DEBUG nova.compute.manager [req-45fa1f72-7efb-49ea-81cb-4a2f0efa7c05 req-93f0bd2c-0540-44fe-b63a-6c23449dc966 service nova] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Refreshing instance network info cache due to event network-changed-57bdb510-a168-422a-93f0-3e2db2eb694d. 
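
The network-vif-plugged records above show Nova's external-event plumbing: a waiter is registered per (event name, tag) pair, and when Neutron reports the port as plugged the manager pops a matching waiter, or logs the "No waiting events found" / "Received unexpected event" lines when nothing is waiting. A generic sketch of that idea with plain threading primitives (not Nova's actual classes):

    import threading

    _waiters = {}   # (event_name, tag) -> threading.Event

    def prepare_for_event(name, tag):
        event = threading.Event()
        _waiters[(name, tag)] = event
        return event

    def pop_instance_event(name, tag):
        event = _waiters.pop((name, tag), None)
        if event is None:
            # Corresponds to the "No waiting events found dispatching ..."
            # and "Received unexpected event ..." records above.
            print('unexpected event', name, tag)
            return
        event.set()

    waiter = prepare_for_event('network-vif-plugged',
                               '57bdb510-a168-422a-93f0-3e2db2eb694d')
    pop_instance_event('network-vif-plugged',
                       '57bdb510-a168-422a-93f0-3e2db2eb694d')
    waiter.wait(timeout=1)
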
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1341.788470] env[68285]: DEBUG oslo_concurrency.lockutils [req-45fa1f72-7efb-49ea-81cb-4a2f0efa7c05 req-93f0bd2c-0540-44fe-b63a-6c23449dc966 service nova] Acquiring lock "refresh_cache-6f6037bf-5527-4391-857b-47bc68fb04fc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.793121] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f2776bf5-9856-4036-9aa4-88ab7d5dd678 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1341.794092] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.546s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.794308] env[68285]: DEBUG nova.objects.instance [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lazy-loading 'resources' on Instance uuid 3858399e-9fc4-4d60-a9d5-95caefb7bd87 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1341.967570] env[68285]: DEBUG nova.network.neutron [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1342.082486] env[68285]: DEBUG nova.network.neutron [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Updating instance_info_cache with network_info: [{"id": "57bdb510-a168-422a-93f0-3e2db2eb694d", "address": "fa:16:3e:d7:fb:70", "network": {"id": "5a60e0fe-6186-4391-93ec-abfcb9af9900", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1879073887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f732a9946001482bb76dee4e2cf844c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57bdb510-a1", "ovs_interfaceid": "57bdb510-a168-422a-93f0-3e2db2eb694d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1342.232898] env[68285]: DEBUG nova.compute.manager [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1342.233098] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1342.234036] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02dfbcf-a054-4dd0-8294-b591ecd8ab57 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.241993] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1342.242238] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b07061a8-af9f-4148-b8f6-14ec879cb64a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.248214] env[68285]: DEBUG oslo_vmware.api [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1342.248214] env[68285]: value = "task-2892585" [ 1342.248214] env[68285]: _type = "Task" [ 1342.248214] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.255814] env[68285]: DEBUG oslo_vmware.api [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892585, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.505597] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1582463-8c3b-4595-9156-c0ba58f45c8b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.513483] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1a9d41-2381-48a9-ba10-88f8e3078bcf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.542828] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60376350-c5dd-47e5-8cef-659f4a6ef667 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.550390] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51cb4c1-d231-4b82-b437-a435044bbc96 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.563731] env[68285]: DEBUG nova.compute.provider_tree [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1342.585311] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Releasing lock "refresh_cache-6f6037bf-5527-4391-857b-47bc68fb04fc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1342.585629] env[68285]: DEBUG nova.compute.manager [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Instance network_info: |[{"id": "57bdb510-a168-422a-93f0-3e2db2eb694d", "address": "fa:16:3e:d7:fb:70", "network": {"id": "5a60e0fe-6186-4391-93ec-abfcb9af9900", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1879073887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f732a9946001482bb76dee4e2cf844c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57bdb510-a1", "ovs_interfaceid": "57bdb510-a168-422a-93f0-3e2db2eb694d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1342.585931] env[68285]: DEBUG oslo_concurrency.lockutils [req-45fa1f72-7efb-49ea-81cb-4a2f0efa7c05 
req-93f0bd2c-0540-44fe-b63a-6c23449dc966 service nova] Acquired lock "refresh_cache-6f6037bf-5527-4391-857b-47bc68fb04fc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1342.586126] env[68285]: DEBUG nova.network.neutron [req-45fa1f72-7efb-49ea-81cb-4a2f0efa7c05 req-93f0bd2c-0540-44fe-b63a-6c23449dc966 service nova] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Refreshing network info cache for port 57bdb510-a168-422a-93f0-3e2db2eb694d {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1342.590401] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:fb:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f996252-e329-42bd-a897-446dfe2b81cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57bdb510-a168-422a-93f0-3e2db2eb694d', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1342.594669] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1342.595648] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1342.595874] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e5789a6-97cf-4620-a98f-576762a496b8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.617348] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1342.617348] env[68285]: value = "task-2892586" [ 1342.617348] env[68285]: _type = "Task" [ 1342.617348] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.625130] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892586, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.758868] env[68285]: DEBUG oslo_vmware.api [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892585, 'name': PowerOffVM_Task, 'duration_secs': 0.296574} completed successfully. 
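
The "Instance VIF info" record above is derived from the cached network_info entry logged just before it: the bridge name, MAC address, NSX logical-switch id and vif model are repackaged for the VM build. A sketch of that mapping; the helper and its argument shape are illustrative, only the key names mirror the log:

    def vif_info_from_network_info(vif, vif_model='vmxnet3'):
        details = vif['details']
        return {
            'network_name': vif['network']['bridge'],        # 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': vif_model,
        }

    # Feeding the cached entry for port 57bdb510-a168-422a-93f0-3e2db2eb694d
    # through this helper yields a dict shaped like the logged VIF info.
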
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.759101] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1342.759277] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1342.759518] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-896558ee-f037-40ea-99d1-5e6437428d03 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.825568] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1342.825955] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1342.825955] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Deleting the datastore file [datastore1] 0d99fb99-977e-4edc-93d8-492d55fd68a7 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1342.826240] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ef104d4-ce4b-4bd8-96cf-89ed484b922d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.832966] env[68285]: DEBUG oslo_vmware.api [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for the task: (returnval){ [ 1342.832966] env[68285]: value = "task-2892588" [ 1342.832966] env[68285]: _type = "Task" [ 1342.832966] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.840914] env[68285]: DEBUG oslo_vmware.api [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892588, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.067118] env[68285]: DEBUG nova.scheduler.client.report [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1343.127867] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892586, 'name': CreateVM_Task, 'duration_secs': 0.402833} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.128052] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1343.128764] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.128934] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1343.129378] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1343.129731] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d13226ec-5ed4-48e0-89b8-88e9ad3b5239 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.134671] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1343.134671] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5282e21f-8e7b-8f01-f925-ccc6f2e0aa54" [ 1343.134671] env[68285]: _type = "Task" [ 1343.134671] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.141974] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5282e21f-8e7b-8f01-f925-ccc6f2e0aa54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.304227] env[68285]: DEBUG nova.network.neutron [req-45fa1f72-7efb-49ea-81cb-4a2f0efa7c05 req-93f0bd2c-0540-44fe-b63a-6c23449dc966 service nova] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Updated VIF entry in instance network info cache for port 57bdb510-a168-422a-93f0-3e2db2eb694d. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1343.304606] env[68285]: DEBUG nova.network.neutron [req-45fa1f72-7efb-49ea-81cb-4a2f0efa7c05 req-93f0bd2c-0540-44fe-b63a-6c23449dc966 service nova] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Updating instance_info_cache with network_info: [{"id": "57bdb510-a168-422a-93f0-3e2db2eb694d", "address": "fa:16:3e:d7:fb:70", "network": {"id": "5a60e0fe-6186-4391-93ec-abfcb9af9900", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1879073887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f732a9946001482bb76dee4e2cf844c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57bdb510-a1", "ovs_interfaceid": "57bdb510-a168-422a-93f0-3e2db2eb694d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.343305] env[68285]: DEBUG oslo_vmware.api [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Task: {'id': task-2892588, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249013} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.343564] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1343.343754] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1343.343925] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1343.344128] env[68285]: INFO nova.compute.manager [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1343.344385] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1343.344579] env[68285]: DEBUG nova.compute.manager [-] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1343.344716] env[68285]: DEBUG nova.network.neutron [-] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1343.573020] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.779s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.597304] env[68285]: INFO nova.scheduler.client.report [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Deleted allocations for instance 3858399e-9fc4-4d60-a9d5-95caefb7bd87 [ 1343.646952] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5282e21f-8e7b-8f01-f925-ccc6f2e0aa54, 'name': SearchDatastore_Task, 'duration_secs': 0.041969} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.647858] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1343.647858] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1343.647858] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.648049] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1343.648277] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1343.648588] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36062187-3871-477c-8bb7-c48508f331a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.660027] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1343.660027] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1343.660027] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68a42290-e7e2-4676-bf49-cb78891b1365 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.665228] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1343.665228] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]522c11a2-0146-c242-e3c6-a3af5e39ca3f" [ 1343.665228] env[68285]: _type = "Task" [ 1343.665228] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.675157] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522c11a2-0146-c242-e3c6-a3af5e39ca3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.806951] env[68285]: DEBUG oslo_concurrency.lockutils [req-45fa1f72-7efb-49ea-81cb-4a2f0efa7c05 req-93f0bd2c-0540-44fe-b63a-6c23449dc966 service nova] Releasing lock "refresh_cache-6f6037bf-5527-4391-857b-47bc68fb04fc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1343.816368] env[68285]: DEBUG nova.compute.manager [req-66d52968-bc57-477e-8dc0-e17d9e29c7ec req-63764759-92a2-444a-bdd1-2a860db81049 service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Received event network-vif-deleted-e449375e-9811-46ce-83ca-faf0266e4837 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1343.816501] env[68285]: INFO nova.compute.manager [req-66d52968-bc57-477e-8dc0-e17d9e29c7ec req-63764759-92a2-444a-bdd1-2a860db81049 service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Neutron deleted interface e449375e-9811-46ce-83ca-faf0266e4837; detaching it from the instance and deleting it from the info cache [ 1343.816710] env[68285]: DEBUG nova.network.neutron [req-66d52968-bc57-477e-8dc0-e17d9e29c7ec req-63764759-92a2-444a-bdd1-2a860db81049 service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.911335] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1343.911972] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1343.912183] env[68285]: INFO nova.compute.manager [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Shelving [ 1344.106166] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7e296474-6cb9-478a-87a7-54b0cc30bd6f tempest-ServersTestJSON-16285486 tempest-ServersTestJSON-16285486-project-member] Lock "3858399e-9fc4-4d60-a9d5-95caefb7bd87" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.305s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1344.180679] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]522c11a2-0146-c242-e3c6-a3af5e39ca3f, 'name': SearchDatastore_Task, 'duration_secs': 0.021358} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.181645] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2839ed9a-f024-4382-a632-cb2db094b5d2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.187645] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1344.187645] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52951c96-c413-1f97-be99-175b812795ef" [ 1344.187645] env[68285]: _type = "Task" [ 1344.187645] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.194716] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52951c96-c413-1f97-be99-175b812795ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.299806] env[68285]: DEBUG nova.network.neutron [-] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.319554] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d87f8fe-ff4a-4cdf-bfbf-4751bd7e9afd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.329627] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d0b032-f023-4426-9cce-67fd89be7e68 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.363747] env[68285]: DEBUG nova.compute.manager [req-66d52968-bc57-477e-8dc0-e17d9e29c7ec req-63764759-92a2-444a-bdd1-2a860db81049 service nova] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Detach interface failed, port_id=e449375e-9811-46ce-83ca-faf0266e4837, reason: Instance 0d99fb99-977e-4edc-93d8-492d55fd68a7 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1344.698086] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52951c96-c413-1f97-be99-175b812795ef, 'name': SearchDatastore_Task, 'duration_secs': 0.021178} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.698359] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1344.698612] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 6f6037bf-5527-4391-857b-47bc68fb04fc/6f6037bf-5527-4391-857b-47bc68fb04fc.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1344.698870] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81d87d14-3ae4-49f0-9aaa-5e4f2fb927ad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.705115] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1344.705115] env[68285]: value = "task-2892589" [ 1344.705115] env[68285]: _type = "Task" [ 1344.705115] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.712695] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892589, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.801468] env[68285]: INFO nova.compute.manager [-] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Took 1.46 seconds to deallocate network for instance. [ 1344.921724] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1344.922076] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37be476d-cd1e-4942-8624-5718b9d3695e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.930242] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1344.930242] env[68285]: value = "task-2892590" [ 1344.930242] env[68285]: _type = "Task" [ 1344.930242] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.938069] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892590, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.223285] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892589, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.309246] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1345.309756] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1345.310045] env[68285]: DEBUG nova.objects.instance [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lazy-loading 'resources' on Instance uuid 0d99fb99-977e-4edc-93d8-492d55fd68a7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1345.441355] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892590, 'name': PowerOffVM_Task, 'duration_secs': 0.183146} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.441822] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1345.443030] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62599abe-f4e0-49a6-9d58-ea1819a8e38d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.466079] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447c619c-6e22-436b-84b0-783777a36144 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.716865] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892589, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666596} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.717253] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 6f6037bf-5527-4391-857b-47bc68fb04fc/6f6037bf-5527-4391-857b-47bc68fb04fc.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1345.717521] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1345.717899] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fd7df747-26e1-47e0-b74d-d20d76db83d3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.725416] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1345.725416] env[68285]: value = "task-2892591" [ 1345.725416] env[68285]: _type = "Task" [ 1345.725416] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.733924] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892591, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.977701] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1345.977701] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b584d39b-2625-4a79-8db0-884048ad36e9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.984675] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273f36fb-a754-43bb-8bdd-16fec9d6bf77 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.988483] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1345.988483] env[68285]: value = "task-2892592" [ 1345.988483] env[68285]: _type = "Task" [ 1345.988483] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.994794] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fd4856-bcdd-4441-be05-e95c58d2fa7b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.000829] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892592, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.029578] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac74fc96-a411-4ba3-97ba-f595d438d155 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.037405] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b320102d-c459-4193-a697-7d41c1927119 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.051143] env[68285]: DEBUG nova.compute.provider_tree [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.235936] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892591, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070297} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.236230] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1346.237088] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8b1e54-7133-4188-91b8-a7ca3f4258a4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.260054] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 6f6037bf-5527-4391-857b-47bc68fb04fc/6f6037bf-5527-4391-857b-47bc68fb04fc.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1346.260054] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe6c8aa5-b11f-4986-8677-58824eb721a4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.279875] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1346.279875] env[68285]: value = "task-2892593" [ 1346.279875] env[68285]: _type = "Task" [ 1346.279875] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.290330] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892593, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.498609] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892592, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.555014] env[68285]: DEBUG nova.scheduler.client.report [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1346.789579] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892593, 'name': ReconfigVM_Task, 'duration_secs': 0.501875} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.789864] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 6f6037bf-5527-4391-857b-47bc68fb04fc/6f6037bf-5527-4391-857b-47bc68fb04fc.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1346.790530] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1d4472a-616a-4066-9045-ba71922aab90 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.796957] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1346.796957] env[68285]: value = "task-2892594" [ 1346.796957] env[68285]: _type = "Task" [ 1346.796957] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.805312] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892594, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.000929] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892592, 'name': CreateSnapshot_Task, 'duration_secs': 0.903569} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.001255] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1347.002061] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d7574f-4e3f-45a5-aace-c4d7bef6cbbe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.059870] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.750s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1347.081885] env[68285]: INFO nova.scheduler.client.report [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Deleted allocations for instance 0d99fb99-977e-4edc-93d8-492d55fd68a7 [ 1347.309159] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892594, 'name': Rename_Task, 'duration_secs': 0.1763} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.309758] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1347.309758] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49c5b161-7221-4200-8ea7-745efa1086c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.316803] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1347.316803] env[68285]: value = "task-2892595" [ 1347.316803] env[68285]: _type = "Task" [ 1347.316803] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.324996] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892595, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.520040] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1347.520729] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-79a70e1c-d33f-4b4c-bc17-dcf44e86639f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.529600] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1347.529600] env[68285]: value = "task-2892596" [ 1347.529600] env[68285]: _type = "Task" [ 1347.529600] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.538747] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892596, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.593474] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1326ee12-77d0-4371-a888-83630faff407 tempest-AttachVolumeNegativeTest-362351866 tempest-AttachVolumeNegativeTest-362351866-project-member] Lock "0d99fb99-977e-4edc-93d8-492d55fd68a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.867s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1347.745509] env[68285]: INFO nova.compute.manager [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Rebuilding instance [ 1347.805317] env[68285]: DEBUG nova.compute.manager [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1347.806255] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13452a46-0d6b-4239-8d43-5f173dcde8f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.829445] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892595, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.040887] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892596, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.334848] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892595, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.545023] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892596, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.821538] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1348.822592] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6e14bdf-9a9d-4687-9b93-2f9616560fec {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.834528] env[68285]: DEBUG oslo_vmware.api [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892595, 'name': PowerOnVM_Task, 'duration_secs': 1.497269} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.835281] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1348.835492] env[68285]: INFO nova.compute.manager [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Took 8.37 seconds to spawn the instance on the hypervisor. 
[ 1348.835670] env[68285]: DEBUG nova.compute.manager [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1348.836340] env[68285]: DEBUG oslo_vmware.api [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Waiting for the task: (returnval){ [ 1348.836340] env[68285]: value = "task-2892598" [ 1348.836340] env[68285]: _type = "Task" [ 1348.836340] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.836705] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e418c12-b84a-45fb-b85d-f0035840f44e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.856721] env[68285]: DEBUG oslo_vmware.api [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892598, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.042971] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892596, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.351375] env[68285]: DEBUG oslo_vmware.api [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892598, 'name': PowerOffVM_Task, 'duration_secs': 0.327727} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.351375] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1349.351952] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1349.352312] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d5ad58c-6c1a-4e32-bde1-b8a88495e38e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.364936] env[68285]: DEBUG oslo_vmware.api [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Waiting for the task: (returnval){ [ 1349.364936] env[68285]: value = "task-2892599" [ 1349.364936] env[68285]: _type = "Task" [ 1349.364936] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.369787] env[68285]: INFO nova.compute.manager [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Took 14.96 seconds to build instance. [ 1349.377394] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1349.377719] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Volume detach. 
Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1349.378153] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581076', 'volume_id': '256c2839-790d-4956-aefd-ad8ce558c59d', 'name': 'volume-256c2839-790d-4956-aefd-ad8ce558c59d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8a598506-724f-48f6-91a8-1e02483e6aab', 'attached_at': '', 'detached_at': '', 'volume_id': '256c2839-790d-4956-aefd-ad8ce558c59d', 'serial': '256c2839-790d-4956-aefd-ad8ce558c59d'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1349.379267] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6691ed63-7274-446a-939b-c52f2caf9632 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.406696] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c87a46-c1a3-43d6-babb-f5f33d080949 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.417531] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3990ffa-aa3e-4cbf-98c0-fe873b6f7a2d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.439466] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.441035] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7475c158-8d27-4c45-a214-fdba904b58b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.458655] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] The volume has not been displaced from its original location: [datastore2] volume-256c2839-790d-4956-aefd-ad8ce558c59d/volume-256c2839-790d-4956-aefd-ad8ce558c59d.vmdk. No consolidation needed. 
{{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1349.464838] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Reconfiguring VM instance instance-00000073 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1349.466153] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9bd13093-228a-4e18-87bc-551c2d156a1a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.484894] env[68285]: DEBUG oslo_vmware.api [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Waiting for the task: (returnval){ [ 1349.484894] env[68285]: value = "task-2892600" [ 1349.484894] env[68285]: _type = "Task" [ 1349.484894] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.493654] env[68285]: DEBUG oslo_vmware.api [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892600, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.542599] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892596, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.866660] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.871616] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c66dfd70-0b93-4b5f-b1a4-5a0cf2376571 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "6f6037bf-5527-4391-857b-47bc68fb04fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.476s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1349.995928] env[68285]: DEBUG oslo_vmware.api [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892600, 'name': ReconfigVM_Task, 'duration_secs': 0.277502} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.996170] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Reconfigured VM instance instance-00000073 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1350.000913] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-498edb33-d53d-477a-aeb7-be860819997e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.015513] env[68285]: DEBUG oslo_vmware.api [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Waiting for the task: (returnval){ [ 1350.015513] env[68285]: value = "task-2892601" [ 1350.015513] env[68285]: _type = "Task" [ 1350.015513] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.023289] env[68285]: DEBUG oslo_vmware.api [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892601, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.043091] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892596, 'name': CloneVM_Task, 'duration_secs': 2.358291} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.043323] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Created linked-clone VM from snapshot [ 1350.044116] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3153dc-40f1-4c27-b76c-dde2cf21176a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.051327] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Uploading image 771ad50d-8fe8-4388-9936-92056e5c4163 {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1350.085714] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1350.085714] env[68285]: value = "vm-581095" [ 1350.085714] env[68285]: _type = "VirtualMachine" [ 1350.085714] env[68285]: }. 
{{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1350.085714] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-03a95a6c-79a7-43f6-974d-1071c66b954c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.091461] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lease: (returnval){ [ 1350.091461] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52505bbf-566c-24d6-acc7-7f11488e2907" [ 1350.091461] env[68285]: _type = "HttpNfcLease" [ 1350.091461] env[68285]: } obtained for exporting VM: (result){ [ 1350.091461] env[68285]: value = "vm-581095" [ 1350.091461] env[68285]: _type = "VirtualMachine" [ 1350.091461] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1350.091874] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the lease: (returnval){ [ 1350.091874] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52505bbf-566c-24d6-acc7-7f11488e2907" [ 1350.091874] env[68285]: _type = "HttpNfcLease" [ 1350.091874] env[68285]: } to be ready. {{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1350.098385] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1350.098385] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52505bbf-566c-24d6-acc7-7f11488e2907" [ 1350.098385] env[68285]: _type = "HttpNfcLease" [ 1350.098385] env[68285]: } is initializing. 
{{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1350.430877] env[68285]: INFO nova.compute.manager [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Rescuing [ 1350.431245] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "refresh_cache-6f6037bf-5527-4391-857b-47bc68fb04fc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.431603] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquired lock "refresh_cache-6f6037bf-5527-4391-857b-47bc68fb04fc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1350.431603] env[68285]: DEBUG nova.network.neutron [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1350.525827] env[68285]: DEBUG oslo_vmware.api [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892601, 'name': ReconfigVM_Task, 'duration_secs': 0.143385} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.526688] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581076', 'volume_id': '256c2839-790d-4956-aefd-ad8ce558c59d', 'name': 'volume-256c2839-790d-4956-aefd-ad8ce558c59d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8a598506-724f-48f6-91a8-1e02483e6aab', 'attached_at': '', 'detached_at': '', 'volume_id': '256c2839-790d-4956-aefd-ad8ce558c59d', 'serial': '256c2839-790d-4956-aefd-ad8ce558c59d'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1350.526688] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1350.527285] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677b78d9-f90a-455e-bbbf-41482aae2ef1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.534867] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1350.534867] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a0cd42f-8b6e-4ffd-afb9-e36c6589d916 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.600649] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1350.601123] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1350.601447] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Deleting the datastore file [datastore2] 8a598506-724f-48f6-91a8-1e02483e6aab {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1350.604159] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34ee4d4c-4173-439f-90b2-7b8ae6d2cec5 {{(pid=68285) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.606641] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1350.606641] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52505bbf-566c-24d6-acc7-7f11488e2907" [ 1350.606641] env[68285]: _type = "HttpNfcLease" [ 1350.606641] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1350.608543] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1350.608543] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52505bbf-566c-24d6-acc7-7f11488e2907" [ 1350.608543] env[68285]: _type = "HttpNfcLease" [ 1350.608543] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1350.608741] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6e4a31-31a1-4323-87b7-012f3e8e2241 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.613239] env[68285]: DEBUG oslo_vmware.api [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Waiting for the task: (returnval){ [ 1350.613239] env[68285]: value = "task-2892605" [ 1350.613239] env[68285]: _type = "Task" [ 1350.613239] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.620147] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52721200-91ef-5c5a-f209-da8690d67b56/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1350.620297] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52721200-91ef-5c5a-f209-da8690d67b56/disk-0.vmdk for reading. {{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1350.683797] env[68285]: DEBUG oslo_vmware.api [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892605, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.706458] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "faf810ae-7823-4115-a709-99dc7c480867" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1350.706458] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "faf810ae-7823-4115-a709-99dc7c480867" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1350.706458] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "faf810ae-7823-4115-a709-99dc7c480867-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1350.706458] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "faf810ae-7823-4115-a709-99dc7c480867-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1350.706749] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "faf810ae-7823-4115-a709-99dc7c480867-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1350.708727] env[68285]: INFO nova.compute.manager [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Terminating instance [ 1350.735756] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7c8be425-3265-4892-bd93-eb85df70ab29 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.868455] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1351.126209] env[68285]: DEBUG oslo_vmware.api [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Task: {'id': task-2892605, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099852} 
completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.129493] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1351.130587] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1351.130587] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1351.176031] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Acquiring lock "1d55a520-481f-4a47-bb06-9e794f9347a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1351.176327] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Lock "1d55a520-481f-4a47-bb06-9e794f9347a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1351.179690] env[68285]: DEBUG nova.network.neutron [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Updating instance_info_cache with network_info: [{"id": "57bdb510-a168-422a-93f0-3e2db2eb694d", "address": "fa:16:3e:d7:fb:70", "network": {"id": "5a60e0fe-6186-4391-93ec-abfcb9af9900", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1879073887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f732a9946001482bb76dee4e2cf844c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57bdb510-a1", "ovs_interfaceid": "57bdb510-a168-422a-93f0-3e2db2eb694d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.206886] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Volume detach. Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1351.207517] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb5bf614-93a4-438c-8fbe-4c8fba44fc90 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.213470] env[68285]: DEBUG nova.compute.manager [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1351.213649] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1351.215176] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4e37753-4a6f-4ce5-9d44-f0df8b5b611d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.220065] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4421e065-d997-4f9e-9ae8-4d574e74cc5d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.238338] env[68285]: DEBUG oslo_vmware.api [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1351.238338] env[68285]: value = "task-2892606" [ 1351.238338] env[68285]: _type = "Task" [ 1351.238338] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.247674] env[68285]: DEBUG oslo_vmware.api [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892606, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.264099] env[68285]: ERROR nova.compute.manager [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Failed to detach volume 256c2839-790d-4956-aefd-ad8ce558c59d from /dev/sda: nova.exception.InstanceNotFound: Instance 8a598506-724f-48f6-91a8-1e02483e6aab could not be found. 
[ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Traceback (most recent call last): [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] self.driver.rebuild(**kwargs) [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] raise NotImplementedError() [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] NotImplementedError [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] During handling of the above exception, another exception occurred: [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Traceback (most recent call last): [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] self.driver.detach_volume(context, old_connection_info, [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] return self._volumeops.detach_volume(connection_info, instance) [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] self._detach_volume_vmdk(connection_info, instance) [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] stable_ref.fetch_moref(session) [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] 
nova.exception.InstanceNotFound: Instance 8a598506-724f-48f6-91a8-1e02483e6aab could not be found. [ 1351.264099] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] [ 1351.434913] env[68285]: DEBUG nova.compute.utils [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Build of instance 8a598506-724f-48f6-91a8-1e02483e6aab aborted: Failed to rebuild volume backed instance. {{(pid=68285) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1351.437652] env[68285]: ERROR nova.compute.manager [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 8a598506-724f-48f6-91a8-1e02483e6aab aborted: Failed to rebuild volume backed instance. [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Traceback (most recent call last): [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] self.driver.rebuild(**kwargs) [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] raise NotImplementedError() [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] NotImplementedError [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] During handling of the above exception, another exception occurred: [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Traceback (most recent call last): [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] self._detach_root_volume(context, instance, root_bdm) [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] with excutils.save_and_reraise_exception(): [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] self.force_reraise() [ 1351.437652] env[68285]: ERROR nova.compute.manager 
[instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] raise self.value [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] self.driver.detach_volume(context, old_connection_info, [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] return self._volumeops.detach_volume(connection_info, instance) [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] self._detach_volume_vmdk(connection_info, instance) [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] stable_ref.fetch_moref(session) [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] nova.exception.InstanceNotFound: Instance 8a598506-724f-48f6-91a8-1e02483e6aab could not be found. 
[ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] During handling of the above exception, another exception occurred: [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Traceback (most recent call last): [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/compute/manager.py", line 11471, in _error_out_instance_on_exception [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] yield [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1351.437652] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] self._do_rebuild_instance_with_claim( [ 1351.439744] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1351.439744] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] self._do_rebuild_instance( [ 1351.439744] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1351.439744] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] self._rebuild_default_impl(**kwargs) [ 1351.439744] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1351.439744] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] self._rebuild_volume_backed_instance( [ 1351.439744] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1351.439744] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] raise exception.BuildAbortException( [ 1351.439744] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] nova.exception.BuildAbortException: Build of instance 8a598506-724f-48f6-91a8-1e02483e6aab aborted: Failed to rebuild volume backed instance. [ 1351.439744] env[68285]: ERROR nova.compute.manager [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] [ 1351.683111] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Releasing lock "refresh_cache-6f6037bf-5527-4391-857b-47bc68fb04fc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1351.688337] env[68285]: DEBUG nova.compute.manager [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1351.749943] env[68285]: DEBUG oslo_vmware.api [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892606, 'name': PowerOffVM_Task, 'duration_secs': 0.355844} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.750329] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1351.750634] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Volume detach. Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1351.750922] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581068', 'volume_id': 'bb993b4e-ec19-499c-a196-764a30b67abe', 'name': 'volume-bb993b4e-ec19-499c-a196-764a30b67abe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'faf810ae-7823-4115-a709-99dc7c480867', 'attached_at': '2025-03-10T15:59:53.000000', 'detached_at': '', 'volume_id': 'bb993b4e-ec19-499c-a196-764a30b67abe', 'serial': 'bb993b4e-ec19-499c-a196-764a30b67abe'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1351.752091] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b02a70-9bcc-467a-b66c-c054e4b60db5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.773914] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6ddad5-d277-470a-a1af-38d79a45cb70 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.781681] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ef8410-edb6-4c66-afb9-eee9ada9ddf5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.800746] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec81852-2ef4-4756-ac57-af46a6a637fa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.816997] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] The volume has not been displaced from its original location: [datastore1] 
volume-bb993b4e-ec19-499c-a196-764a30b67abe/volume-bb993b4e-ec19-499c-a196-764a30b67abe.vmdk. No consolidation needed. {{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1351.822646] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Reconfiguring VM instance instance-0000006e to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1351.823152] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9ceff63-f988-4dad-96c9-5b7c5ea6c2d4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.843424] env[68285]: DEBUG oslo_vmware.api [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1351.843424] env[68285]: value = "task-2892607" [ 1351.843424] env[68285]: _type = "Task" [ 1351.843424] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.851997] env[68285]: DEBUG oslo_vmware.api [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892607, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.209537] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1352.209887] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1352.211941] env[68285]: INFO nova.compute.claims [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1352.353325] env[68285]: DEBUG oslo_vmware.api [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892607, 'name': ReconfigVM_Task, 'duration_secs': 0.170006} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.354043] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Reconfigured VM instance instance-0000006e to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1352.358511] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0282d467-b49d-48bd-8064-681f3d33d648 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.375267] env[68285]: DEBUG oslo_vmware.api [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1352.375267] env[68285]: value = "task-2892608" [ 1352.375267] env[68285]: _type = "Task" [ 1352.375267] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.387156] env[68285]: DEBUG oslo_vmware.api [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892608, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.861146] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.866338] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.887859] env[68285]: DEBUG oslo_vmware.api [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892608, 'name': ReconfigVM_Task, 'duration_secs': 0.210208} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.888313] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581068', 'volume_id': 'bb993b4e-ec19-499c-a196-764a30b67abe', 'name': 'volume-bb993b4e-ec19-499c-a196-764a30b67abe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'faf810ae-7823-4115-a709-99dc7c480867', 'attached_at': '2025-03-10T15:59:53.000000', 'detached_at': '', 'volume_id': 'bb993b4e-ec19-499c-a196-764a30b67abe', 'serial': 'bb993b4e-ec19-499c-a196-764a30b67abe'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1352.888708] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1352.889860] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecdc736-7d76-4fca-a93a-ffd417866e10 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.898574] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1352.898872] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ecb23df-37df-4ce1-98b7-329b626cb3c6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.969587] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1352.969813] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1352.970037] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleting the datastore file [datastore1] faf810ae-7823-4115-a709-99dc7c480867 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1352.970368] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c07b1a2-8019-49c0-9120-33ec565a00bc {{(pid=68285) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.979345] env[68285]: DEBUG oslo_vmware.api [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1352.979345] env[68285]: value = "task-2892611" [ 1352.979345] env[68285]: _type = "Task" [ 1352.979345] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.987876] env[68285]: DEBUG oslo_vmware.api [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892611, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.226594] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1353.227063] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1f739f9-6129-4ad4-89fa-c862023e5cdf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.234710] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1353.234710] env[68285]: value = "task-2892612" [ 1353.234710] env[68285]: _type = "Task" [ 1353.234710] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.244788] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892612, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.426450] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22dd6ebf-cb33-454e-87e8-a8687096b2f2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.436018] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea0e35d-6288-440f-b441-0a97567cd992 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.471948] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1353.473061] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8897853-2d47-4563-bb7c-ec1ea2bf84fd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.480515] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95aab9d2-c869-4ea3-a714-c337321b07a4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.498284] env[68285]: DEBUG nova.compute.provider_tree [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1353.502468] env[68285]: DEBUG oslo_vmware.api [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892611, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08084} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.502985] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1353.503172] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1353.503354] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1353.503518] env[68285]: INFO nova.compute.manager [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Took 2.29 seconds to destroy the instance on the hypervisor. [ 1353.503798] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1353.504031] env[68285]: DEBUG nova.compute.manager [-] [instance: faf810ae-7823-4115-a709-99dc7c480867] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1353.504139] env[68285]: DEBUG nova.network.neutron [-] [instance: faf810ae-7823-4115-a709-99dc7c480867] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1353.745649] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892612, 'name': PowerOffVM_Task, 'duration_secs': 0.213325} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.746117] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1353.746949] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5f4b29-bb68-4faa-97f6-3a57963755be {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.769099] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Acquiring lock "8a598506-724f-48f6-91a8-1e02483e6aab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1353.769099] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Lock "8a598506-724f-48f6-91a8-1e02483e6aab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1353.769099] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Acquiring lock "8a598506-724f-48f6-91a8-1e02483e6aab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1353.769099] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Lock "8a598506-724f-48f6-91a8-1e02483e6aab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1353.769099] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Lock "8a598506-724f-48f6-91a8-1e02483e6aab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1353.771807] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebaa7be-574d-4c06-90a3-0b314c58bd4f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.775782] env[68285]: INFO nova.compute.manager [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 
tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Terminating instance [ 1353.808990] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1353.809313] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b464ad16-2856-4d09-9385-72cede31268e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.816924] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1353.816924] env[68285]: value = "task-2892613" [ 1353.816924] env[68285]: _type = "Task" [ 1353.816924] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.827765] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892613, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.988828] env[68285]: DEBUG nova.compute.manager [req-9bb8f9c4-2e71-4788-ae7f-219fc085fb19 req-03658f3f-37ca-459f-83f2-ffb1419125f1 service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Received event network-vif-deleted-c311cfc8-4f78-4068-8841-8aa0ce5243c2 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1353.989092] env[68285]: INFO nova.compute.manager [req-9bb8f9c4-2e71-4788-ae7f-219fc085fb19 req-03658f3f-37ca-459f-83f2-ffb1419125f1 service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Neutron deleted interface c311cfc8-4f78-4068-8841-8aa0ce5243c2; detaching it from the instance and deleting it from the info cache [ 1353.989370] env[68285]: DEBUG nova.network.neutron [req-9bb8f9c4-2e71-4788-ae7f-219fc085fb19 req-03658f3f-37ca-459f-83f2-ffb1419125f1 service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.004656] env[68285]: DEBUG nova.scheduler.client.report [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1354.285506] env[68285]: DEBUG nova.compute.manager [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 
tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1354.285879] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61d5529a-8bc2-4a8d-bde4-29250f8e6f37 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.295695] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9f8610-8856-465b-912e-2c42a0e00366 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.335218] env[68285]: WARNING nova.virt.vmwareapi.driver [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 8a598506-724f-48f6-91a8-1e02483e6aab could not be found. [ 1354.335692] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1354.336598] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a96bb322-92a0-4133-b07e-66383bbb470b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.342183] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1354.342387] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1354.342665] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.342836] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1354.343040] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1354.343292] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4064cdfc-1857-42d2-bdd5-976ea6e11792 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.349128] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ba7428-f223-4ca1-9a36-7ece3b96fd50 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.360067] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1354.360253] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1354.361324] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd93900b-0d62-41b9-8ec5-367a0ea3a8f1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.366244] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1354.366244] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52a55322-a7e3-33ed-64bc-6ff8f8222627" [ 1354.366244] env[68285]: _type = "Task" [ 1354.366244] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.374750] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a55322-a7e3-33ed-64bc-6ff8f8222627, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.386397] env[68285]: WARNING nova.virt.vmwareapi.vmops [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8a598506-724f-48f6-91a8-1e02483e6aab could not be found. 
[ 1354.386595] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1354.386792] env[68285]: INFO nova.compute.manager [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Took 0.10 seconds to destroy the instance on the hypervisor. [ 1354.387039] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1354.387297] env[68285]: DEBUG nova.compute.manager [-] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1354.387389] env[68285]: DEBUG nova.network.neutron [-] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1354.469141] env[68285]: DEBUG nova.network.neutron [-] [instance: faf810ae-7823-4115-a709-99dc7c480867] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.491734] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff4f631d-6652-40e3-ae3c-93e9a0bdf060 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.502599] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22ce9a4-1513-4769-a602-27d94673f7c4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.514060] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.304s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1354.514625] env[68285]: DEBUG nova.compute.manager [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1354.517532] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.046s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1354.544958] env[68285]: DEBUG nova.compute.manager [req-9bb8f9c4-2e71-4788-ae7f-219fc085fb19 req-03658f3f-37ca-459f-83f2-ffb1419125f1 service nova] [instance: faf810ae-7823-4115-a709-99dc7c480867] Detach interface failed, port_id=c311cfc8-4f78-4068-8841-8aa0ce5243c2, reason: Instance faf810ae-7823-4115-a709-99dc7c480867 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1354.729446] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52b741f-17f8-4df1-a22b-0cf1fff4a2b5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.737703] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b361d38-d210-4dcc-bd04-c06ce843e1d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.770602] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b20791-ea27-4c69-862c-c6223dd29618 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.778534] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b70a5fa-daf0-4147-8a90-6d59a0eca878 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.792780] env[68285]: DEBUG nova.compute.provider_tree [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1354.868880] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.869451] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.884608] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52a55322-a7e3-33ed-64bc-6ff8f8222627, 'name': SearchDatastore_Task, 'duration_secs': 0.009497} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.885613] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de90b5aa-3f8b-48e0-9ef2-9b4f8f3efc39 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.893858] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1354.893858] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5227c6c1-95f8-6dab-1c9d-a6cb9daa7480" [ 1354.893858] env[68285]: _type = "Task" [ 1354.893858] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.906896] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5227c6c1-95f8-6dab-1c9d-a6cb9daa7480, 'name': SearchDatastore_Task, 'duration_secs': 0.011557} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.907178] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1354.907435] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 6f6037bf-5527-4391-857b-47bc68fb04fc/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk. {{(pid=68285) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1354.907700] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d8148cf-77c1-4f3f-9dd9-74d9cd257a9a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.916396] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1354.916396] env[68285]: value = "task-2892614" [ 1354.916396] env[68285]: _type = "Task" [ 1354.916396] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.924728] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892614, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.972292] env[68285]: INFO nova.compute.manager [-] [instance: faf810ae-7823-4115-a709-99dc7c480867] Took 1.47 seconds to deallocate network for instance. [ 1355.023666] env[68285]: DEBUG nova.compute.utils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1355.024964] env[68285]: DEBUG nova.compute.manager [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1355.026191] env[68285]: DEBUG nova.network.neutron [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1355.072365] env[68285]: DEBUG nova.policy [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd4bbc7b1468461eb59734d8f0f8720c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a89fac62d57547399212e15163aab79d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1355.299154] env[68285]: DEBUG nova.scheduler.client.report [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1355.365207] env[68285]: DEBUG nova.network.neutron [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Successfully created port: 0f581a63-e768-4216-916e-e7800527ee44 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1355.370578] env[68285]: DEBUG nova.network.neutron [-] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1355.376954] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1355.429550] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892614, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.528103] env[68285]: INFO nova.compute.manager [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Took 0.56 seconds to detach 1 volumes for instance. [ 1355.533055] env[68285]: DEBUG nova.compute.manager [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1355.535157] env[68285]: DEBUG nova.compute.manager [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: faf810ae-7823-4115-a709-99dc7c480867] Deleting volume: bb993b4e-ec19-499c-a196-764a30b67abe {{(pid=68285) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1355.805798] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.288s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1355.807923] env[68285]: INFO nova.compute.manager [None req-4672b67a-0947-4be0-b2b0-7997f8da4dd7 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Successfully reverted task state from rebuilding on failure for instance. 
[ 1355.811494] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.435s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1355.811494] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1355.811494] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68285) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1355.812854] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2a427d-f935-4714-a29d-402e105e9128 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.822155] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1445c67a-cad8-43c7-99e3-696d715fe3e9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.837153] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d94c332-02a5-47d8-90cf-b8eb165321fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.844421] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771cda19-c505-4080-b028-060fc8553900 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.881930] env[68285]: INFO nova.compute.manager [-] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Took 1.49 seconds to deallocate network for instance. 
[ 1355.882814] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179344MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=68285) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1355.882987] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1355.883236] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1355.932325] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892614, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520597} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.932325] env[68285]: INFO nova.virt.vmwareapi.ds_util [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 6f6037bf-5527-4391-857b-47bc68fb04fc/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk. [ 1355.932325] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9637d2-1836-4e28-8364-953cab2d3543 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.961255] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 6f6037bf-5527-4391-857b-47bc68fb04fc/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1355.963660] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79c7ac49-1273-45d8-b814-4bec3ee70c10 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.988026] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1355.988026] env[68285]: value = "task-2892616" [ 1355.988026] env[68285]: _type = "Task" [ 1355.988026] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.995523] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892616, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.084607] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.147117] env[68285]: DEBUG nova.compute.manager [req-286f7667-35b8-45e9-92bd-7da26a4be809 req-819db8d3-f175-4513-aa80-061d8d100545 service nova] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Received event network-vif-deleted-d215cd64-22e2-46be-88b2-f3185156486b {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1356.463218] env[68285]: INFO nova.compute.manager [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Took 0.58 seconds to detach 1 volumes for instance. [ 1356.465193] env[68285]: DEBUG nova.compute.manager [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Deleting volume: 256c2839-790d-4956-aefd-ad8ce558c59d {{(pid=68285) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1356.497279] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892616, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.545468] env[68285]: DEBUG nova.compute.manager [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1356.579229] env[68285]: DEBUG nova.virt.hardware [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1356.579496] env[68285]: DEBUG nova.virt.hardware [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1356.579656] env[68285]: DEBUG nova.virt.hardware [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1356.579839] env[68285]: DEBUG nova.virt.hardware [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1356.580051] env[68285]: DEBUG nova.virt.hardware [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1356.580310] env[68285]: DEBUG nova.virt.hardware [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1356.580567] env[68285]: DEBUG nova.virt.hardware [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1356.580729] env[68285]: DEBUG nova.virt.hardware [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 
tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1356.581430] env[68285]: DEBUG nova.virt.hardware [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1356.581620] env[68285]: DEBUG nova.virt.hardware [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1356.581802] env[68285]: DEBUG nova.virt.hardware [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1356.582887] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928a5b3f-813c-4fe9-988f-5b2ee2d16f10 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.596402] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d60b15a-9cd4-4526-a50c-498c2b29708c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.907038] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0593c42-a05a-4f35-b2e2-cdf22bbce1ac tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "interface-9ddeb48e-ef72-4e6e-9058-d45ebde7583e-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.907423] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0593c42-a05a-4f35-b2e2-cdf22bbce1ac tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-9ddeb48e-ef72-4e6e-9058-d45ebde7583e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1356.907825] env[68285]: DEBUG nova.objects.instance [None req-b0593c42-a05a-4f35-b2e2-cdf22bbce1ac tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'flavor' on Instance uuid 9ddeb48e-ef72-4e6e-9058-d45ebde7583e {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1356.976961] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d0f6ab86-e18d-42ac-bcf3-94eafb1939ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.977164] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance d1446290-95ce-4e87-85df-7cc69bb57ce7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.977301] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 801f524e-28b5-4452-b880-0fc30d3c5eef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.977595] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance feda1a98-3086-43a6-a887-f4d1602ca8ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.977595] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance b2199b56-64bd-4096-b877-e10656b09313 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.977687] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 852ab501-00a6-442b-804a-1bbf49a2be8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.977740] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance a1dc8c86-523f-4474-9fea-9ccf35a36b3f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.977859] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance e3117ede-5d88-4e47-a32f-ea91b1ba83ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.978035] env[68285]: WARNING nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance faf810ae-7823-4115-a709-99dc7c480867 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1356.978178] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 9ddeb48e-ef72-4e6e-9058-d45ebde7583e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.978296] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance c341075b-9d30-45db-9d83-f196bf90ecd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.978407] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 6f6037bf-5527-4391-857b-47bc68fb04fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.978529] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 1d55a520-481f-4a47-bb06-9e794f9347a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1356.978878] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1356.983302] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1357.003104] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892616, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.040625] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1357.195185] env[68285]: DEBUG nova.network.neutron [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Successfully updated port: 0f581a63-e768-4216-916e-e7800527ee44 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1357.248508] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fa6c89-e914-4eb8-9b35-9fef10cd6da9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.257274] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550dbe74-682f-49df-bf16-7e2d7f69e3cd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.291768] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f98298-8542-4aa6-9190-9cf4e95ac213 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.305257] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc0de12-2976-4eb9-b75d-e61e89c4e528 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.318350] env[68285]: DEBUG nova.compute.provider_tree [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1357.418653] env[68285]: DEBUG nova.objects.instance [None req-b0593c42-a05a-4f35-b2e2-cdf22bbce1ac tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'pci_requests' on Instance uuid 9ddeb48e-ef72-4e6e-9058-d45ebde7583e {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1357.497735] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892616, 'name': ReconfigVM_Task, 'duration_secs': 1.245574} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.498113] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 6f6037bf-5527-4391-857b-47bc68fb04fc/ce84ab4c-9913-42dc-b839-714ad2184867-rescue.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1357.499335] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b121ed4-fd9c-4463-9e14-18a6c2720689 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.525388] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9227a332-2a54-4181-a30e-875e2b3ba1ce {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.541281] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1357.541281] env[68285]: value = "task-2892618" [ 1357.541281] env[68285]: _type = "Task" [ 1357.541281] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.549986] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892618, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.702358] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Acquiring lock "refresh_cache-1d55a520-481f-4a47-bb06-9e794f9347a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.702358] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Acquired lock "refresh_cache-1d55a520-481f-4a47-bb06-9e794f9347a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1357.702534] env[68285]: DEBUG nova.network.neutron [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1357.821372] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1357.918597] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52721200-91ef-5c5a-f209-da8690d67b56/disk-0.vmdk. 
{{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1357.919519] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9936d69a-6581-4cde-b6fe-f6d096c561a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.922353] env[68285]: DEBUG nova.objects.base [None req-b0593c42-a05a-4f35-b2e2-cdf22bbce1ac tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Object Instance<9ddeb48e-ef72-4e6e-9058-d45ebde7583e> lazy-loaded attributes: flavor,pci_requests {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1357.922545] env[68285]: DEBUG nova.network.neutron [None req-b0593c42-a05a-4f35-b2e2-cdf22bbce1ac tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1357.928487] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52721200-91ef-5c5a-f209-da8690d67b56/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1357.928617] env[68285]: ERROR oslo_vmware.rw_handles [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52721200-91ef-5c5a-f209-da8690d67b56/disk-0.vmdk due to incomplete transfer. [ 1357.928860] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-76a27370-ff16-4b1d-8816-475ebdd483ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.936172] env[68285]: DEBUG oslo_vmware.rw_handles [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52721200-91ef-5c5a-f209-da8690d67b56/disk-0.vmdk. 
{{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1357.936372] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Uploaded image 771ad50d-8fe8-4388-9936-92056e5c4163 to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1357.938556] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1357.938960] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0e8ede9f-30ef-4161-8d59-ad8aa3c5d8ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.947022] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1357.947022] env[68285]: value = "task-2892619" [ 1357.947022] env[68285]: _type = "Task" [ 1357.947022] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.952912] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892619, 'name': Destroy_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.009820] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b0593c42-a05a-4f35-b2e2-cdf22bbce1ac tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-9ddeb48e-ef72-4e6e-9058-d45ebde7583e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.102s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.051660] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892618, 'name': ReconfigVM_Task, 'duration_secs': 0.211252} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.051963] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1358.052239] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c964c83-6f70-4b83-b332-9c31654ddef2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.058755] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1358.058755] env[68285]: value = "task-2892620" [ 1358.058755] env[68285]: _type = "Task" [ 1358.058755] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.066515] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892620, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.236809] env[68285]: DEBUG nova.network.neutron [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1358.326259] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1358.326499] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.443s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.326777] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.242s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.326964] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.329058] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.289s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.329275] env[68285]: DEBUG nova.objects.instance [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Lazy-loading 'resources' on Instance uuid 8a598506-724f-48f6-91a8-1e02483e6aab {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1358.357633] env[68285]: INFO nova.scheduler.client.report [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleted allocations for instance faf810ae-7823-4115-a709-99dc7c480867 [ 1358.392983] env[68285]: DEBUG nova.compute.manager [req-b2c2726c-ac4f-4351-b13c-f9719190fd7e req-8bcca5e7-52c4-4e38-a2f8-8d270dbc225b service nova] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Received event network-vif-plugged-0f581a63-e768-4216-916e-e7800527ee44 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1358.392983] env[68285]: DEBUG oslo_concurrency.lockutils [req-b2c2726c-ac4f-4351-b13c-f9719190fd7e req-8bcca5e7-52c4-4e38-a2f8-8d270dbc225b service nova] Acquiring lock "1d55a520-481f-4a47-bb06-9e794f9347a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.392983] env[68285]: DEBUG oslo_concurrency.lockutils [req-b2c2726c-ac4f-4351-b13c-f9719190fd7e 
req-8bcca5e7-52c4-4e38-a2f8-8d270dbc225b service nova] Lock "1d55a520-481f-4a47-bb06-9e794f9347a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.392983] env[68285]: DEBUG oslo_concurrency.lockutils [req-b2c2726c-ac4f-4351-b13c-f9719190fd7e req-8bcca5e7-52c4-4e38-a2f8-8d270dbc225b service nova] Lock "1d55a520-481f-4a47-bb06-9e794f9347a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.393247] env[68285]: DEBUG nova.compute.manager [req-b2c2726c-ac4f-4351-b13c-f9719190fd7e req-8bcca5e7-52c4-4e38-a2f8-8d270dbc225b service nova] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] No waiting events found dispatching network-vif-plugged-0f581a63-e768-4216-916e-e7800527ee44 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1358.393572] env[68285]: WARNING nova.compute.manager [req-b2c2726c-ac4f-4351-b13c-f9719190fd7e req-8bcca5e7-52c4-4e38-a2f8-8d270dbc225b service nova] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Received unexpected event network-vif-plugged-0f581a63-e768-4216-916e-e7800527ee44 for instance with vm_state building and task_state spawning. [ 1358.394132] env[68285]: DEBUG nova.compute.manager [req-b2c2726c-ac4f-4351-b13c-f9719190fd7e req-8bcca5e7-52c4-4e38-a2f8-8d270dbc225b service nova] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Received event network-changed-0f581a63-e768-4216-916e-e7800527ee44 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1358.395406] env[68285]: DEBUG nova.compute.manager [req-b2c2726c-ac4f-4351-b13c-f9719190fd7e req-8bcca5e7-52c4-4e38-a2f8-8d270dbc225b service nova] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Refreshing instance network info cache due to event network-changed-0f581a63-e768-4216-916e-e7800527ee44. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1358.395624] env[68285]: DEBUG oslo_concurrency.lockutils [req-b2c2726c-ac4f-4351-b13c-f9719190fd7e req-8bcca5e7-52c4-4e38-a2f8-8d270dbc225b service nova] Acquiring lock "refresh_cache-1d55a520-481f-4a47-bb06-9e794f9347a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.402375] env[68285]: DEBUG nova.network.neutron [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Updating instance_info_cache with network_info: [{"id": "0f581a63-e768-4216-916e-e7800527ee44", "address": "fa:16:3e:70:80:6a", "network": {"id": "40f70454-4bfa-4531-ad71-a105561f27fe", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-59926880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a89fac62d57547399212e15163aab79d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f581a63-e7", "ovs_interfaceid": "0f581a63-e768-4216-916e-e7800527ee44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.459137] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892619, 'name': Destroy_Task, 'duration_secs': 0.468307} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.459570] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Destroyed the VM [ 1358.459957] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1358.460306] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-546d62ec-df2e-4c44-8f61-cc35222adfae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.466411] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1358.466411] env[68285]: value = "task-2892621" [ 1358.466411] env[68285]: _type = "Task" [ 1358.466411] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.475746] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892621, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.569899] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892620, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.866839] env[68285]: DEBUG oslo_concurrency.lockutils [None req-79be2613-761a-429b-9fbe-53146f233fd6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "faf810ae-7823-4115-a709-99dc7c480867" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.161s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.906531] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Releasing lock "refresh_cache-1d55a520-481f-4a47-bb06-9e794f9347a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1358.906883] env[68285]: DEBUG nova.compute.manager [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Instance network_info: |[{"id": "0f581a63-e768-4216-916e-e7800527ee44", "address": "fa:16:3e:70:80:6a", "network": {"id": "40f70454-4bfa-4531-ad71-a105561f27fe", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-59926880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a89fac62d57547399212e15163aab79d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f581a63-e7", "ovs_interfaceid": "0f581a63-e768-4216-916e-e7800527ee44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1358.907193] env[68285]: DEBUG oslo_concurrency.lockutils [req-b2c2726c-ac4f-4351-b13c-f9719190fd7e req-8bcca5e7-52c4-4e38-a2f8-8d270dbc225b service nova] Acquired lock "refresh_cache-1d55a520-481f-4a47-bb06-9e794f9347a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1358.907394] env[68285]: DEBUG nova.network.neutron [req-b2c2726c-ac4f-4351-b13c-f9719190fd7e req-8bcca5e7-52c4-4e38-a2f8-8d270dbc225b service nova] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Refreshing network info cache for port 0f581a63-e768-4216-916e-e7800527ee44 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1358.908474] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:70:80:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e41c97-4d75-4041-ae71-321e7e9d480b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f581a63-e768-4216-916e-e7800527ee44', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1358.915888] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Creating folder: Project (a89fac62d57547399212e15163aab79d). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1358.920899] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d94bcc35-9f31-455a-84d1-9c28c12ccec8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.934815] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Created folder: Project (a89fac62d57547399212e15163aab79d) in parent group-v580775. [ 1358.935027] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Creating folder: Instances. Parent ref: group-v581096. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1358.935278] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88ee6738-9b40-41b7-bd3a-056117b31ef8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.946241] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Created folder: Instances in parent group-v581096. [ 1358.946500] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1358.946721] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1358.946941] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f756d1bf-9ca0-43f9-9f0d-6dab8e8e92c9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.971760] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1358.971760] env[68285]: value = "task-2892624" [ 1358.971760] env[68285]: _type = "Task" [ 1358.971760] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.981537] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892621, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.983920] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892624, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.011375] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b68c164-6207-4236-be7d-1885997fcf63 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.020167] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98a1134-ad29-4c79-bcd8-47493aeede86 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.057905] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f250173c-1037-4284-83d6-6fedf5e03551 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.072617] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d107f2bd-eff8-4ef0-8cb0-5f14e4bb6f3d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.076432] env[68285]: DEBUG oslo_vmware.api [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892620, 'name': PowerOnVM_Task, 'duration_secs': 0.521607} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.076700] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1359.080028] env[68285]: DEBUG nova.compute.manager [None req-3315138d-d9af-4d84-8853-b7874eda7a16 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1359.080809] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16c0c49-56d3-40cc-868e-74a31f35b18a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.092447] env[68285]: DEBUG nova.compute.provider_tree [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1359.166949] env[68285]: DEBUG nova.network.neutron [req-b2c2726c-ac4f-4351-b13c-f9719190fd7e req-8bcca5e7-52c4-4e38-a2f8-8d270dbc225b service nova] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Updated VIF entry in instance network info cache for port 0f581a63-e768-4216-916e-e7800527ee44. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1359.167726] env[68285]: DEBUG nova.network.neutron [req-b2c2726c-ac4f-4351-b13c-f9719190fd7e req-8bcca5e7-52c4-4e38-a2f8-8d270dbc225b service nova] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Updating instance_info_cache with network_info: [{"id": "0f581a63-e768-4216-916e-e7800527ee44", "address": "fa:16:3e:70:80:6a", "network": {"id": "40f70454-4bfa-4531-ad71-a105561f27fe", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-59926880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a89fac62d57547399212e15163aab79d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f581a63-e7", "ovs_interfaceid": "0f581a63-e768-4216-916e-e7800527ee44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.327247] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task 
ComputeManager._reclaim_queued_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.327454] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68285) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1359.673378] env[68285]: DEBUG nova.scheduler.client.report [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1359.679893] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892621, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.681306] env[68285]: DEBUG oslo_concurrency.lockutils [req-b2c2726c-ac4f-4351-b13c-f9719190fd7e req-8bcca5e7-52c4-4e38-a2f8-8d270dbc225b service nova] Releasing lock "refresh_cache-1d55a520-481f-4a47-bb06-9e794f9347a7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1359.682669] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892624, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.979376] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892621, 'name': RemoveSnapshot_Task, 'duration_secs': 1.094533} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.980062] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1359.980338] env[68285]: DEBUG nova.compute.manager [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1359.981191] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca51c64-d482-4179-b127-398515379fe9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.986450] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892624, 'name': CreateVM_Task, 'duration_secs': 0.626672} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.986922] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1359.998209] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.998491] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1359.998965] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1359.999323] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23537962-922b-44b6-a0e5-ae8ecd9341f9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.004986] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Waiting for the task: (returnval){ [ 1360.004986] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52bbfef4-365f-ede5-532c-2a88c2b1f4de" [ 1360.004986] env[68285]: _type = 
"Task" [ 1360.004986] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.013155] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bbfef4-365f-ede5-532c-2a88c2b1f4de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.186110] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.856s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1360.187992] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "interface-9ddeb48e-ef72-4e6e-9058-d45ebde7583e-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1360.188279] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-9ddeb48e-ef72-4e6e-9058-d45ebde7583e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1360.188725] env[68285]: DEBUG nova.objects.instance [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'flavor' on Instance uuid 9ddeb48e-ef72-4e6e-9058-d45ebde7583e {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1360.497651] env[68285]: INFO nova.compute.manager [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Shelve offloading [ 1360.515982] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bbfef4-365f-ede5-532c-2a88c2b1f4de, 'name': SearchDatastore_Task, 'duration_secs': 0.013944} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.516321] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1360.516544] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1360.516775] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.516923] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1360.517116] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1360.517447] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f2c3538-4b39-4224-9c72-c1001960a6a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.526017] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1360.526207] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1360.526893] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-768c9682-1625-4b94-8a02-d6581fd4e783 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.532389] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Waiting for the task: (returnval){ [ 1360.532389] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52344342-226f-c022-c77b-2d9979e577a8" [ 1360.532389] env[68285]: _type = "Task" [ 1360.532389] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.539702] env[68285]: DEBUG oslo_concurrency.lockutils [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "b2199b56-64bd-4096-b877-e10656b09313" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1360.539917] env[68285]: DEBUG oslo_concurrency.lockutils [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "b2199b56-64bd-4096-b877-e10656b09313" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1360.540135] env[68285]: DEBUG oslo_concurrency.lockutils [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "b2199b56-64bd-4096-b877-e10656b09313-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1360.540317] env[68285]: DEBUG oslo_concurrency.lockutils [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "b2199b56-64bd-4096-b877-e10656b09313-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1360.540479] env[68285]: DEBUG oslo_concurrency.lockutils [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "b2199b56-64bd-4096-b877-e10656b09313-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1360.541913] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': 
session[52410409-0226-2549-941e-c989b8ec60bd]52344342-226f-c022-c77b-2d9979e577a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.542386] env[68285]: INFO nova.compute.manager [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Terminating instance [ 1360.707453] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2dced15b-5534-4505-aafb-dea58de51148 tempest-ServerActionsV293TestJSON-2062219040 tempest-ServerActionsV293TestJSON-2062219040-project-member] Lock "8a598506-724f-48f6-91a8-1e02483e6aab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.940s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1360.761284] env[68285]: DEBUG nova.objects.instance [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'pci_requests' on Instance uuid 9ddeb48e-ef72-4e6e-9058-d45ebde7583e {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1360.822937] env[68285]: INFO nova.compute.manager [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Unrescuing [ 1360.822937] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "refresh_cache-6f6037bf-5527-4391-857b-47bc68fb04fc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.823126] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquired lock "refresh_cache-6f6037bf-5527-4391-857b-47bc68fb04fc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1360.823265] env[68285]: DEBUG nova.network.neutron [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1361.001560] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1361.001911] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65755443-c186-42e2-b924-77337d6138aa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.009868] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the 
task: (returnval){ [ 1361.009868] env[68285]: value = "task-2892625" [ 1361.009868] env[68285]: _type = "Task" [ 1361.009868] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.017463] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892625, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.041652] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52344342-226f-c022-c77b-2d9979e577a8, 'name': SearchDatastore_Task, 'duration_secs': 0.013496} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.042468] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d492615b-226a-4bcf-9d8c-e1f0f80d620b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.045327] env[68285]: DEBUG nova.compute.manager [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1361.045546] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1361.046304] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e477adc2-9393-4d35-9bc7-e774d08ee70c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.049939] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Waiting for the task: (returnval){ [ 1361.049939] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524d10b1-e1d4-74ac-2f63-c3c9599bcc57" [ 1361.049939] env[68285]: _type = "Task" [ 1361.049939] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.054833] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1361.055359] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2f44386-455d-4baf-956f-52eeaf7d1cd9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.059902] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]524d10b1-e1d4-74ac-2f63-c3c9599bcc57, 'name': SearchDatastore_Task, 'duration_secs': 0.009857} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.060479] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1361.060752] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 1d55a520-481f-4a47-bb06-9e794f9347a7/1d55a520-481f-4a47-bb06-9e794f9347a7.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1361.060970] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a697897b-e5ba-42ef-832f-f15afd40da88 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.064093] env[68285]: DEBUG oslo_vmware.api [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1361.064093] env[68285]: value = "task-2892626" [ 1361.064093] env[68285]: _type = "Task" [ 1361.064093] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.067746] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Waiting for the task: (returnval){ [ 1361.067746] env[68285]: value = "task-2892627" [ 1361.067746] env[68285]: _type = "Task" [ 1361.067746] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.075764] env[68285]: DEBUG oslo_vmware.api [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892626, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.080795] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892627, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.264105] env[68285]: DEBUG nova.objects.base [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Object Instance<9ddeb48e-ef72-4e6e-9058-d45ebde7583e> lazy-loaded attributes: flavor,pci_requests {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1361.264334] env[68285]: DEBUG nova.network.neutron [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1361.304758] env[68285]: DEBUG nova.policy [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '494447cb560a41dd9a3118745ac60554', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75a6837bced940cdaf5743b8e94cce29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1361.520883] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1361.521133] env[68285]: DEBUG nova.compute.manager [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1361.521944] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd36d7f6-4fd4-401a-8e4d-4e9c62557ef7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.528313] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock 
"refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.528491] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1361.528669] env[68285]: DEBUG nova.network.neutron [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1361.576294] env[68285]: DEBUG oslo_vmware.api [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892626, 'name': PowerOffVM_Task, 'duration_secs': 0.165846} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.579426] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1361.579598] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1361.579858] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892627, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50491} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.580140] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe43fe2f-0f53-4e6d-bdf8-4358514e381b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.581592] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 1d55a520-481f-4a47-bb06-9e794f9347a7/1d55a520-481f-4a47-bb06-9e794f9347a7.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1361.581799] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1361.582061] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-daa3480a-acad-4381-9f0f-933c8c3aa1a5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.588499] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Waiting for the task: (returnval){ [ 1361.588499] env[68285]: value = "task-2892629" [ 1361.588499] env[68285]: _type = "Task" [ 1361.588499] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.654400] env[68285]: DEBUG nova.network.neutron [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Updating instance_info_cache with network_info: [{"id": "57bdb510-a168-422a-93f0-3e2db2eb694d", "address": "fa:16:3e:d7:fb:70", "network": {"id": "5a60e0fe-6186-4391-93ec-abfcb9af9900", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1879073887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f732a9946001482bb76dee4e2cf844c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57bdb510-a1", "ovs_interfaceid": "57bdb510-a168-422a-93f0-3e2db2eb694d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.662884] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1361.662884] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1361.662884] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleting the datastore file [datastore1] b2199b56-64bd-4096-b877-e10656b09313 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1361.662884] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5e4e68b-7edb-4a56-bcef-70145a187d64 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.669114] env[68285]: DEBUG oslo_vmware.api [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1361.669114] env[68285]: value = "task-2892630" [ 1361.669114] env[68285]: _type = "Task" [ 1361.669114] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.676753] env[68285]: DEBUG oslo_vmware.api [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892630, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.748386] env[68285]: DEBUG nova.network.neutron [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Successfully created port: b7625de4-8596-45da-a5c4-db92e344c774 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1362.098146] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892629, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072161} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.098428] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1362.099219] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829e0edb-fb69-46e3-976e-b5089fac19f5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.122900] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 1d55a520-481f-4a47-bb06-9e794f9347a7/1d55a520-481f-4a47-bb06-9e794f9347a7.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1362.123126] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a4e96a3-8fcb-4254-b128-906e1845248c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.145996] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Waiting for the task: (returnval){ [ 1362.145996] env[68285]: value = "task-2892631" [ 1362.145996] env[68285]: _type = "Task" [ 1362.145996] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.154229] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892631, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.158153] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Releasing lock "refresh_cache-6f6037bf-5527-4391-857b-47bc68fb04fc" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1362.158560] env[68285]: DEBUG nova.objects.instance [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lazy-loading 'flavor' on Instance uuid 6f6037bf-5527-4391-857b-47bc68fb04fc {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1362.179199] env[68285]: DEBUG oslo_vmware.api [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892630, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142771} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.179475] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1362.179657] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1362.179894] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1362.180072] env[68285]: INFO nova.compute.manager [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: b2199b56-64bd-4096-b877-e10656b09313] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1362.180319] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1362.180548] env[68285]: DEBUG nova.compute.manager [-] [instance: b2199b56-64bd-4096-b877-e10656b09313] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1362.180646] env[68285]: DEBUG nova.network.neutron [-] [instance: b2199b56-64bd-4096-b877-e10656b09313] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1362.262503] env[68285]: DEBUG nova.network.neutron [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Updating instance_info_cache with network_info: [{"id": "10199287-9009-48cc-b97a-e94229f7d640", "address": "fa:16:3e:46:49:64", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10199287-90", "ovs_interfaceid": "10199287-9009-48cc-b97a-e94229f7d640", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.497953] env[68285]: DEBUG nova.compute.manager [req-c73681f5-db77-4fdd-9d0f-28bf60fe9ed3 req-f9bcd279-210d-4944-96ac-5ea920bc11c6 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Received event network-vif-deleted-cdb4de32-3a3d-4f10-abb3-9d403cde25c7 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1362.498339] env[68285]: INFO nova.compute.manager [req-c73681f5-db77-4fdd-9d0f-28bf60fe9ed3 req-f9bcd279-210d-4944-96ac-5ea920bc11c6 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Neutron deleted interface cdb4de32-3a3d-4f10-abb3-9d403cde25c7; detaching it from the instance and deleting it from the info cache [ 1362.498572] env[68285]: DEBUG nova.network.neutron [req-c73681f5-db77-4fdd-9d0f-28bf60fe9ed3 req-f9bcd279-210d-4944-96ac-5ea920bc11c6 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.661476] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892631, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.667374] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b953b11d-b9c3-45ff-bbfb-af60fd953d3d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.693223] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1362.693604] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-318000c0-9461-4ecb-9f94-7f951a65c9ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.701532] env[68285]: DEBUG oslo_vmware.api [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1362.701532] env[68285]: value = "task-2892632" [ 1362.701532] env[68285]: _type = "Task" [ 1362.701532] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.709900] env[68285]: DEBUG oslo_vmware.api [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892632, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.765717] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1362.951092] env[68285]: DEBUG nova.network.neutron [-] [instance: b2199b56-64bd-4096-b877-e10656b09313] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.002174] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0954caef-1eab-4c7c-9bbf-a80752a813ae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.012031] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53cf98b-964a-4976-9c1d-a11434912d7c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.046715] env[68285]: DEBUG nova.compute.manager [req-c73681f5-db77-4fdd-9d0f-28bf60fe9ed3 req-f9bcd279-210d-4944-96ac-5ea920bc11c6 service nova] [instance: b2199b56-64bd-4096-b877-e10656b09313] Detach interface failed, port_id=cdb4de32-3a3d-4f10-abb3-9d403cde25c7, reason: Instance b2199b56-64bd-4096-b877-e10656b09313 could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1363.140414] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1363.144035] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ada999d-3b2e-4fae-a899-6118ef9c743b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.149301] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1363.152687] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99077ecb-57bc-40e2-9d46-8c151b423099 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.158773] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892631, 'name': ReconfigVM_Task, 'duration_secs': 0.719681} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.159046] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 1d55a520-481f-4a47-bb06-9e794f9347a7/1d55a520-481f-4a47-bb06-9e794f9347a7.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1363.159657] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78d9ab37-0ef0-44de-8a87-938a8e82484f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.165470] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Waiting for the task: (returnval){ [ 1363.165470] env[68285]: value = "task-2892634" [ 1363.165470] env[68285]: _type = "Task" [ 1363.165470] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.170191] env[68285]: DEBUG nova.compute.manager [req-4e7cbba6-7899-4652-b432-3a29ac1c3a95 req-bc3a4e23-f0a9-4501-8cf8-6558b6cefbfb service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Received event network-vif-plugged-b7625de4-8596-45da-a5c4-db92e344c774 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1363.170394] env[68285]: DEBUG oslo_concurrency.lockutils [req-4e7cbba6-7899-4652-b432-3a29ac1c3a95 req-bc3a4e23-f0a9-4501-8cf8-6558b6cefbfb service nova] Acquiring lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1363.170592] env[68285]: DEBUG oslo_concurrency.lockutils [req-4e7cbba6-7899-4652-b432-3a29ac1c3a95 req-bc3a4e23-f0a9-4501-8cf8-6558b6cefbfb service nova] Lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1363.170777] env[68285]: DEBUG oslo_concurrency.lockutils [req-4e7cbba6-7899-4652-b432-3a29ac1c3a95 req-bc3a4e23-f0a9-4501-8cf8-6558b6cefbfb service nova] Lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1363.170943] env[68285]: DEBUG nova.compute.manager [req-4e7cbba6-7899-4652-b432-3a29ac1c3a95 req-bc3a4e23-f0a9-4501-8cf8-6558b6cefbfb service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] No waiting events found dispatching network-vif-plugged-b7625de4-8596-45da-a5c4-db92e344c774 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1363.171119] env[68285]: WARNING nova.compute.manager [req-4e7cbba6-7899-4652-b432-3a29ac1c3a95 req-bc3a4e23-f0a9-4501-8cf8-6558b6cefbfb service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Received unexpected event network-vif-plugged-b7625de4-8596-45da-a5c4-db92e344c774 for instance with vm_state active and task_state None. [ 1363.179097] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892634, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.212900] env[68285]: DEBUG oslo_vmware.api [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892632, 'name': PowerOffVM_Task, 'duration_secs': 0.197163} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.213300] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1363.219513] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Reconfiguring VM instance instance-00000077 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1363.221281] env[68285]: DEBUG nova.network.neutron [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Successfully updated port: b7625de4-8596-45da-a5c4-db92e344c774 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1363.222279] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-695e7a7f-ab93-42bd-ab34-eaa6da674b07 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.237867] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1363.237867] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1363.237867] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleting the datastore file [datastore1] e3117ede-5d88-4e47-a32f-ea91b1ba83ec {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1363.238115] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.238276] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1363.238441] env[68285]: DEBUG nova.network.neutron [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 
tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1363.240227] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72b02443-47be-4b97-9c85-1b782284f5eb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.250056] env[68285]: DEBUG oslo_vmware.api [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1363.250056] env[68285]: value = "task-2892636" [ 1363.250056] env[68285]: _type = "Task" [ 1363.250056] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.250308] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1363.250308] env[68285]: value = "task-2892635" [ 1363.250308] env[68285]: _type = "Task" [ 1363.250308] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.262152] env[68285]: DEBUG oslo_vmware.api [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892636, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.265282] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892635, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.454156] env[68285]: INFO nova.compute.manager [-] [instance: b2199b56-64bd-4096-b877-e10656b09313] Took 1.27 seconds to deallocate network for instance. [ 1363.676038] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892634, 'name': Rename_Task, 'duration_secs': 0.16547} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.676336] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1363.676603] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0aefd34b-92d5-422b-a049-7b95b0645659 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.682475] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Waiting for the task: (returnval){ [ 1363.682475] env[68285]: value = "task-2892637" [ 1363.682475] env[68285]: _type = "Task" [ 1363.682475] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.690323] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892637, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.763214] env[68285]: DEBUG oslo_vmware.api [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892636, 'name': ReconfigVM_Task, 'duration_secs': 0.264471} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.767558] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Reconfigured VM instance instance-00000077 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1363.767558] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1363.767558] env[68285]: DEBUG oslo_vmware.api [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892635, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199729} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.767991] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9766c3c2-4da7-43f0-bae3-0e5bb1d27350 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.769687] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1363.769871] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1363.770944] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1363.778608] env[68285]: DEBUG oslo_vmware.api [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1363.778608] env[68285]: value = "task-2892638" [ 1363.778608] env[68285]: _type = "Task" [ 1363.778608] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.785568] env[68285]: WARNING nova.network.neutron [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d already exists in list: networks containing: ['c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d']. ignoring it [ 1363.794379] env[68285]: DEBUG oslo_vmware.api [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892638, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.799251] env[68285]: INFO nova.scheduler.client.report [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleted allocations for instance e3117ede-5d88-4e47-a32f-ea91b1ba83ec [ 1363.961571] env[68285]: DEBUG oslo_concurrency.lockutils [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1363.961878] env[68285]: DEBUG oslo_concurrency.lockutils [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1363.962141] env[68285]: DEBUG nova.objects.instance [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lazy-loading 'resources' on Instance uuid b2199b56-64bd-4096-b877-e10656b09313 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1364.108444] env[68285]: DEBUG nova.network.neutron [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updating instance_info_cache with network_info: [{"id": "efe1cc65-a9a1-4768-81db-53da716df13a", "address": "fa:16:3e:91:d9:2c", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefe1cc65-a9", "ovs_interfaceid": "efe1cc65-a9a1-4768-81db-53da716df13a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b7625de4-8596-45da-a5c4-db92e344c774", "address": "fa:16:3e:eb:36:2e", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7625de4-85", "ovs_interfaceid": "b7625de4-8596-45da-a5c4-db92e344c774", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.194169] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892637, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.291727] env[68285]: DEBUG oslo_vmware.api [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892638, 'name': PowerOnVM_Task, 'duration_secs': 0.447454} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.291995] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1364.292239] env[68285]: DEBUG nova.compute.manager [None req-39b8b7b1-d288-4a06-8e0b-c51fb6139d42 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1364.293109] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943183b4-6334-45a4-bf81-5d062eccbbb0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.302524] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1364.606365] env[68285]: DEBUG nova.compute.manager [req-dc32c5c0-2de3-4a5a-806e-a0686ea57b7c req-591eb033-ae0d-4b08-a5f9-acd7d424ae92 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Received event network-vif-unplugged-10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1364.606365] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc32c5c0-2de3-4a5a-806e-a0686ea57b7c req-591eb033-ae0d-4b08-a5f9-acd7d424ae92 service nova] 
Acquiring lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1364.606541] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc32c5c0-2de3-4a5a-806e-a0686ea57b7c req-591eb033-ae0d-4b08-a5f9-acd7d424ae92 service nova] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1364.606738] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc32c5c0-2de3-4a5a-806e-a0686ea57b7c req-591eb033-ae0d-4b08-a5f9-acd7d424ae92 service nova] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1364.606857] env[68285]: DEBUG nova.compute.manager [req-dc32c5c0-2de3-4a5a-806e-a0686ea57b7c req-591eb033-ae0d-4b08-a5f9-acd7d424ae92 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] No waiting events found dispatching network-vif-unplugged-10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1364.607060] env[68285]: WARNING nova.compute.manager [req-dc32c5c0-2de3-4a5a-806e-a0686ea57b7c req-591eb033-ae0d-4b08-a5f9-acd7d424ae92 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Received unexpected event network-vif-unplugged-10199287-9009-48cc-b97a-e94229f7d640 for instance with vm_state shelved_offloaded and task_state unshelving. [ 1364.607156] env[68285]: DEBUG nova.compute.manager [req-dc32c5c0-2de3-4a5a-806e-a0686ea57b7c req-591eb033-ae0d-4b08-a5f9-acd7d424ae92 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Received event network-changed-10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1364.607347] env[68285]: DEBUG nova.compute.manager [req-dc32c5c0-2de3-4a5a-806e-a0686ea57b7c req-591eb033-ae0d-4b08-a5f9-acd7d424ae92 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Refreshing instance network info cache due to event network-changed-10199287-9009-48cc-b97a-e94229f7d640. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1364.607571] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc32c5c0-2de3-4a5a-806e-a0686ea57b7c req-591eb033-ae0d-4b08-a5f9-acd7d424ae92 service nova] Acquiring lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.607715] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc32c5c0-2de3-4a5a-806e-a0686ea57b7c req-591eb033-ae0d-4b08-a5f9-acd7d424ae92 service nova] Acquired lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1364.607876] env[68285]: DEBUG nova.network.neutron [req-dc32c5c0-2de3-4a5a-806e-a0686ea57b7c req-591eb033-ae0d-4b08-a5f9-acd7d424ae92 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Refreshing network info cache for port 10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1364.610628] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e9c6cd-79f1-4fa4-8e65-64db0ee983f1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.614260] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1364.614800] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.614952] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1364.615979] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644e3d34-c613-43ed-9fe2-8c0fac0edf30 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.635072] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60c2b78-bfb6-412c-a6d6-8a6802013f6e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.639011] env[68285]: DEBUG nova.virt.hardware [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1364.639240] env[68285]: DEBUG nova.virt.hardware [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1364.639398] env[68285]: DEBUG nova.virt.hardware [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1364.639579] env[68285]: DEBUG nova.virt.hardware [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1364.639727] env[68285]: DEBUG nova.virt.hardware [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1364.639958] env[68285]: DEBUG nova.virt.hardware [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1364.640086] env[68285]: DEBUG nova.virt.hardware [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1364.640250] env[68285]: DEBUG nova.virt.hardware [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1364.640415] env[68285]: DEBUG nova.virt.hardware [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1364.640575] env[68285]: DEBUG nova.virt.hardware [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1364.640746] env[68285]: DEBUG nova.virt.hardware [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1364.646916] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Reconfiguring VM to attach interface {{(pid=68285) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1364.647469] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65b0a5cb-3d24-4446-a693-27393419904c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.689765] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512ba2d8-39d4-44b7-8477-02ea91de84b2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.692118] env[68285]: DEBUG oslo_vmware.api [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1364.692118] env[68285]: value = "task-2892639" [ 1364.692118] env[68285]: _type = "Task" [ 1364.692118] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.701276] env[68285]: DEBUG oslo_vmware.api [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892637, 'name': PowerOnVM_Task, 'duration_secs': 0.554123} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.703017] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df92335a-aaca-4572-9c85-3c5c869f5cf0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.709447] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1364.709754] env[68285]: INFO nova.compute.manager [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Took 8.16 seconds to spawn the instance on the hypervisor. 
[ 1364.710048] env[68285]: DEBUG nova.compute.manager [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1364.713756] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b16cc0-a388-4649-a8c7-4f69aaf361eb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.716073] env[68285]: DEBUG oslo_vmware.api [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892639, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.726465] env[68285]: DEBUG nova.compute.provider_tree [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1365.201268] env[68285]: DEBUG nova.compute.manager [req-90cf5021-09b0-426e-8f49-f44e6c90c03f req-d5a6016a-6aa6-488e-8eaa-29c90b1242e8 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Received event network-changed-b7625de4-8596-45da-a5c4-db92e344c774 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1365.201542] env[68285]: DEBUG nova.compute.manager [req-90cf5021-09b0-426e-8f49-f44e6c90c03f req-d5a6016a-6aa6-488e-8eaa-29c90b1242e8 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Refreshing instance network info cache due to event network-changed-b7625de4-8596-45da-a5c4-db92e344c774. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1365.201665] env[68285]: DEBUG oslo_concurrency.lockutils [req-90cf5021-09b0-426e-8f49-f44e6c90c03f req-d5a6016a-6aa6-488e-8eaa-29c90b1242e8 service nova] Acquiring lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.201808] env[68285]: DEBUG oslo_concurrency.lockutils [req-90cf5021-09b0-426e-8f49-f44e6c90c03f req-d5a6016a-6aa6-488e-8eaa-29c90b1242e8 service nova] Acquired lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1365.201971] env[68285]: DEBUG nova.network.neutron [req-90cf5021-09b0-426e-8f49-f44e6c90c03f req-d5a6016a-6aa6-488e-8eaa-29c90b1242e8 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Refreshing network info cache for port b7625de4-8596-45da-a5c4-db92e344c774 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1365.209158] env[68285]: DEBUG oslo_vmware.api [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892639, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.233583] env[68285]: DEBUG nova.scheduler.client.report [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1365.246494] env[68285]: INFO nova.compute.manager [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Took 13.05 seconds to build instance. [ 1365.358880] env[68285]: DEBUG nova.network.neutron [req-dc32c5c0-2de3-4a5a-806e-a0686ea57b7c req-591eb033-ae0d-4b08-a5f9-acd7d424ae92 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Updated VIF entry in instance network info cache for port 10199287-9009-48cc-b97a-e94229f7d640. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1365.359332] env[68285]: DEBUG nova.network.neutron [req-dc32c5c0-2de3-4a5a-806e-a0686ea57b7c req-591eb033-ae0d-4b08-a5f9-acd7d424ae92 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Updating instance_info_cache with network_info: [{"id": "10199287-9009-48cc-b97a-e94229f7d640", "address": "fa:16:3e:46:49:64", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap10199287-90", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.460286] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "6f6037bf-5527-4391-857b-47bc68fb04fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1365.460572] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 
tempest-ServerRescueTestJSON-1302412218-project-member] Lock "6f6037bf-5527-4391-857b-47bc68fb04fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1365.460790] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "6f6037bf-5527-4391-857b-47bc68fb04fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1365.460995] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "6f6037bf-5527-4391-857b-47bc68fb04fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1365.461192] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "6f6037bf-5527-4391-857b-47bc68fb04fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.463581] env[68285]: INFO nova.compute.manager [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Terminating instance [ 1365.652635] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1365.703027] env[68285]: DEBUG oslo_vmware.api [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892639, 'name': ReconfigVM_Task, 'duration_secs': 0.551069} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.703552] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1365.703767] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Reconfigured VM to attach interface {{(pid=68285) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1365.742277] env[68285]: DEBUG oslo_concurrency.lockutils [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.780s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.744339] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.442s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1365.744567] env[68285]: DEBUG nova.objects.instance [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'resources' on Instance uuid e3117ede-5d88-4e47-a32f-ea91b1ba83ec {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1365.748786] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7df1518d-9b4b-4d58-8305-90095d5ee98f tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Lock "1d55a520-481f-4a47-bb06-9e794f9347a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.573s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.769084] env[68285]: INFO nova.scheduler.client.report [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleted allocations for instance b2199b56-64bd-4096-b877-e10656b09313 [ 1365.861962] env[68285]: DEBUG oslo_concurrency.lockutils [req-dc32c5c0-2de3-4a5a-806e-a0686ea57b7c req-591eb033-ae0d-4b08-a5f9-acd7d424ae92 service nova] Releasing lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1365.925394] env[68285]: DEBUG nova.network.neutron [req-90cf5021-09b0-426e-8f49-f44e6c90c03f req-d5a6016a-6aa6-488e-8eaa-29c90b1242e8 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updated VIF entry in instance network info cache for port b7625de4-8596-45da-a5c4-db92e344c774. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1365.925825] env[68285]: DEBUG nova.network.neutron [req-90cf5021-09b0-426e-8f49-f44e6c90c03f req-d5a6016a-6aa6-488e-8eaa-29c90b1242e8 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updating instance_info_cache with network_info: [{"id": "efe1cc65-a9a1-4768-81db-53da716df13a", "address": "fa:16:3e:91:d9:2c", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefe1cc65-a9", "ovs_interfaceid": "efe1cc65-a9a1-4768-81db-53da716df13a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b7625de4-8596-45da-a5c4-db92e344c774", "address": "fa:16:3e:eb:36:2e", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7625de4-85", "ovs_interfaceid": "b7625de4-8596-45da-a5c4-db92e344c774", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.967877] env[68285]: DEBUG nova.compute.manager [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1365.968119] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1365.969235] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa507a7-28a9-4758-b019-11c20201ee7f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.977548] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1365.977777] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5bcc1992-5f67-4756-aff5-d18a9c80c2a0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.983987] env[68285]: DEBUG oslo_vmware.api [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1365.983987] env[68285]: value = "task-2892640" [ 1365.983987] env[68285]: _type = "Task" [ 1365.983987] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.991515] env[68285]: DEBUG oslo_vmware.api [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892640, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.210121] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d49221f8-71bf-4f33-867e-dbcce56e6449 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-9ddeb48e-ef72-4e6e-9058-d45ebde7583e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.022s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1366.247599] env[68285]: DEBUG nova.objects.instance [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'numa_topology' on Instance uuid e3117ede-5d88-4e47-a32f-ea91b1ba83ec {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1366.277296] env[68285]: DEBUG oslo_concurrency.lockutils [None req-242a4bd7-0062-4a72-a815-b873ec1ec959 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "b2199b56-64bd-4096-b877-e10656b09313" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.737s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1366.428161] env[68285]: DEBUG oslo_concurrency.lockutils [req-90cf5021-09b0-426e-8f49-f44e6c90c03f req-d5a6016a-6aa6-488e-8eaa-29c90b1242e8 service nova] Releasing lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1366.493890] env[68285]: DEBUG oslo_vmware.api [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892640, 'name': PowerOffVM_Task, 'duration_secs': 0.258348} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.494196] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1366.494373] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1366.494615] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c1b08b3-6136-46e1-90d3-67a88f1b959f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.545064] env[68285]: DEBUG oslo_concurrency.lockutils [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Acquiring lock "1d55a520-481f-4a47-bb06-9e794f9347a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1366.545347] env[68285]: DEBUG oslo_concurrency.lockutils [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Lock "1d55a520-481f-4a47-bb06-9e794f9347a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1366.545607] env[68285]: DEBUG oslo_concurrency.lockutils [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Acquiring lock "1d55a520-481f-4a47-bb06-9e794f9347a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1366.545831] env[68285]: DEBUG oslo_concurrency.lockutils [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Lock "1d55a520-481f-4a47-bb06-9e794f9347a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1366.546049] env[68285]: DEBUG oslo_concurrency.lockutils [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Lock "1d55a520-481f-4a47-bb06-9e794f9347a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1366.548245] env[68285]: INFO 
nova.compute.manager [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Terminating instance [ 1366.750045] env[68285]: DEBUG nova.objects.base [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1366.904317] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6010c6-afb0-470c-a265-38066766b5d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.913030] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bad3d75-fa24-409c-80e6-54e4d24cd1bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.949120] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4536c74f-ee0f-4e6f-b00f-7df9df3bd72b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.956994] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac9f796-8401-4f59-a878-b065916a1834 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.970610] env[68285]: DEBUG nova.compute.provider_tree [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1367.052203] env[68285]: DEBUG nova.compute.manager [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1367.052380] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1367.053353] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c47b75-b9dc-4e09-bbf7-f7a824e6a468 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.061689] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1367.061950] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31dbc332-f3a5-4e30-ae01-7a4fb4d884cf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.069330] env[68285]: DEBUG oslo_vmware.api [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Waiting for the task: (returnval){ [ 1367.069330] env[68285]: value = "task-2892643" [ 1367.069330] env[68285]: _type = "Task" [ 1367.069330] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.077646] env[68285]: DEBUG oslo_vmware.api [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892643, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.198859] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1367.198859] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1367.198859] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Deleting the datastore file [datastore1] 6f6037bf-5527-4391-857b-47bc68fb04fc {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1367.198859] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-003949e1-1fc6-4fb7-bb78-a3fb4986527d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.205943] env[68285]: DEBUG oslo_vmware.api [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1367.205943] env[68285]: value = "task-2892644" [ 1367.205943] env[68285]: _type = "Task" [ 1367.205943] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.214872] env[68285]: DEBUG oslo_vmware.api [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892644, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.494870] env[68285]: ERROR nova.scheduler.client.report [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [req-062e9908-0d91-43b3-b768-2faf7758f25b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7bdf675d-15ae-4a4b-9c03-79d8c773b76b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-062e9908-0d91-43b3-b768-2faf7758f25b"}]} [ 1367.511120] env[68285]: DEBUG nova.scheduler.client.report [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1367.525300] env[68285]: DEBUG nova.scheduler.client.report [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1367.525688] env[68285]: DEBUG nova.compute.provider_tree [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1367.539628] env[68285]: DEBUG nova.scheduler.client.report [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1367.557919] env[68285]: DEBUG nova.scheduler.client.report [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1367.579323] env[68285]: DEBUG oslo_vmware.api [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892643, 'name': PowerOffVM_Task, 'duration_secs': 0.188399} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.579323] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1367.579455] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1367.581812] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09c1410c-742d-416c-a13e-6a3e3974b67b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.715340] env[68285]: DEBUG oslo_vmware.api [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892644, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.335643} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.715340] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1367.715501] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1367.715666] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1367.715836] env[68285]: INFO nova.compute.manager [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Took 1.75 seconds to destroy the instance on the hypervisor. [ 1367.716084] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1367.717120] env[68285]: DEBUG nova.compute.manager [-] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1367.717195] env[68285]: DEBUG nova.network.neutron [-] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1367.719205] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8451f546-eb32-4819-8215-c3f6803aac8d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.726480] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebfcd9e2-c563-4a6d-b211-a3991abd58d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.764087] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74b3804-04e3-4e8b-bb02-5839b6780336 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.774900] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3af00c9-4a13-4d54-be45-78aed6a76224 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.790840] env[68285]: DEBUG nova.compute.provider_tree [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1367.966805] env[68285]: DEBUG nova.compute.manager [req-c1af5976-7fb5-41ce-8518-44a63aeadae9 req-eb8aa14d-337c-404f-b82f-459ef66df6b6 service nova] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Received event network-vif-deleted-57bdb510-a168-422a-93f0-3e2db2eb694d {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1367.967044] env[68285]: INFO nova.compute.manager [req-c1af5976-7fb5-41ce-8518-44a63aeadae9 req-eb8aa14d-337c-404f-b82f-459ef66df6b6 service nova] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Neutron deleted interface 57bdb510-a168-422a-93f0-3e2db2eb694d; detaching it from the instance and deleting it from the info cache [ 1367.967173] env[68285]: DEBUG nova.network.neutron [req-c1af5976-7fb5-41ce-8518-44a63aeadae9 req-eb8aa14d-337c-404f-b82f-459ef66df6b6 service nova] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.006469] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb 
tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1368.006696] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1368.006876] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Deleting the datastore file [datastore2] 1d55a520-481f-4a47-bb06-9e794f9347a7 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1368.007165] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19b0b8c7-2c44-4481-aa19-516ffe71a9c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.013728] env[68285]: DEBUG oslo_vmware.api [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Waiting for the task: (returnval){ [ 1368.013728] env[68285]: value = "task-2892646" [ 1368.013728] env[68285]: _type = "Task" [ 1368.013728] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.021420] env[68285]: DEBUG oslo_vmware.api [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892646, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.269849] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "interface-9ddeb48e-ef72-4e6e-9058-d45ebde7583e-ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1368.270248] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-9ddeb48e-ef72-4e6e-9058-d45ebde7583e-ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1368.270495] env[68285]: DEBUG nova.objects.instance [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'flavor' on Instance uuid 9ddeb48e-ef72-4e6e-9058-d45ebde7583e {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1368.324810] env[68285]: DEBUG nova.scheduler.client.report [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Updated inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with generation 176 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1368.325172] env[68285]: DEBUG nova.compute.provider_tree [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Updating resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b generation from 176 to 177 during operation: update_inventory {{(pid=68285) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1368.325299] env[68285]: DEBUG nova.compute.provider_tree [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1368.449707] env[68285]: DEBUG nova.network.neutron [-] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Updating 
instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.469634] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88b5e7a9-6e90-4e52-ad28-a59950639cda {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.479631] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d1bab1-be84-45a8-9505-8b2c513d4958 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.510106] env[68285]: DEBUG nova.compute.manager [req-c1af5976-7fb5-41ce-8518-44a63aeadae9 req-eb8aa14d-337c-404f-b82f-459ef66df6b6 service nova] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Detach interface failed, port_id=57bdb510-a168-422a-93f0-3e2db2eb694d, reason: Instance 6f6037bf-5527-4391-857b-47bc68fb04fc could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1368.521802] env[68285]: DEBUG oslo_vmware.api [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Task: {'id': task-2892646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.377235} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.522027] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1368.522218] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1368.522389] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1368.522561] env[68285]: INFO nova.compute.manager [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Took 1.47 seconds to destroy the instance on the hypervisor. [ 1368.522786] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1368.522999] env[68285]: DEBUG nova.compute.manager [-] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1368.523111] env[68285]: DEBUG nova.network.neutron [-] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1368.831669] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.086s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1368.867526] env[68285]: DEBUG nova.objects.instance [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'pci_requests' on Instance uuid 9ddeb48e-ef72-4e6e-9058-d45ebde7583e {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1368.952052] env[68285]: INFO nova.compute.manager [-] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Took 1.23 seconds to deallocate network for instance. [ 1369.259359] env[68285]: DEBUG nova.network.neutron [-] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.339640] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d9a29ce6-b93b-4693-8069-8a3a42781464 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.428s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1369.340852] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 3.688s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1369.341045] env[68285]: INFO nova.compute.manager [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Unshelving [ 1369.369400] env[68285]: DEBUG nova.objects.base [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Object Instance<9ddeb48e-ef72-4e6e-9058-d45ebde7583e> lazy-loaded attributes: flavor,pci_requests {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1369.369645] env[68285]: DEBUG nova.network.neutron [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 
9ddeb48e-ef72-4e6e-9058-d45ebde7583e] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1369.445550] env[68285]: DEBUG nova.policy [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '494447cb560a41dd9a3118745ac60554', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75a6837bced940cdaf5743b8e94cce29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1369.458037] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1369.458172] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1369.458393] env[68285]: DEBUG nova.objects.instance [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lazy-loading 'resources' on Instance uuid 6f6037bf-5527-4391-857b-47bc68fb04fc {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1369.762545] env[68285]: INFO nova.compute.manager [-] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Took 1.24 seconds to deallocate network for instance. 
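Earlier in this span the report client hits a 409 placement.concurrent_update ("resource provider generation conflict") while writing inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, refreshes its cached view, and retries until the write lands and the provider generation moves from 176 to 177. The underlying pattern is an optimistic compare-and-swap keyed on the provider generation; a rough, simplified sketch follows (the endpoint shape, auth, and headers are assumptions for illustration, not Nova's actual report client):

    import requests

    def set_inventory(base_url, rp_uuid, inventories, session=requests, retries=3):
        url = f"{base_url}/resource_providers/{rp_uuid}/inventories"
        for _ in range(retries):
            current = session.get(url).json()
            body = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = session.put(url, json=body)
            if resp.status_code != 409:
                resp.raise_for_status()   # surface any non-conflict error
                return resp.json()
            # 409: another writer bumped the generation first; loop, re-read the
            # provider to pick up the new generation, and try the write again.
        raise RuntimeError("inventory update kept conflicting; giving up")

Retrying with a freshly read generation is what keeps two concurrent updaters (here, overlapping resource-tracker operations on the same compute node) from silently overwriting each other's inventory.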
[ 1370.068130] env[68285]: DEBUG nova.compute.manager [req-e2b3b90e-0cc2-433c-bb30-a9207b04226f req-f38d218b-26d2-402d-a341-dba0be96653a service nova] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Received event network-vif-deleted-0f581a63-e768-4216-916e-e7800527ee44 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1370.092438] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e4a716-fd51-4cc7-ad13-c31a1c0073bf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.101346] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e877170-c327-4555-91d9-f751c22e4a11 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.130558] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1384045b-dfc4-4d33-a1f2-04d29265dbb8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.137676] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab462883-4d70-4376-a316-7032ffa28a67 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.150623] env[68285]: DEBUG nova.compute.provider_tree [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1370.269243] env[68285]: DEBUG oslo_concurrency.lockutils [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1370.361833] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1370.439172] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1370.439448] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1370.440219] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1370.440428] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1370.440607] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1370.442720] env[68285]: INFO nova.compute.manager [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Terminating instance [ 1370.653785] env[68285]: DEBUG nova.scheduler.client.report [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1370.883058] env[68285]: DEBUG nova.network.neutron [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Successfully updated port: ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1370.946813] env[68285]: DEBUG nova.compute.manager [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1370.947018] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1370.947951] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebd4621-8b40-4b1e-8bc2-1e204c8ed8cc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.955896] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1370.956165] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-14f78cff-980f-4db9-a567-ed83d05233ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.962321] env[68285]: DEBUG oslo_vmware.api [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1370.962321] env[68285]: value = "task-2892647" [ 1370.962321] env[68285]: _type = "Task" [ 1370.962321] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.970832] env[68285]: DEBUG oslo_vmware.api [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892647, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.158748] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.700s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1371.162074] env[68285]: DEBUG oslo_concurrency.lockutils [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.892s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1371.162074] env[68285]: DEBUG nova.objects.instance [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Lazy-loading 'resources' on Instance uuid 1d55a520-481f-4a47-bb06-9e794f9347a7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1371.180222] env[68285]: INFO nova.scheduler.client.report [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Deleted allocations for instance 6f6037bf-5527-4391-857b-47bc68fb04fc [ 1371.385170] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.385543] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1371.385667] env[68285]: DEBUG nova.network.neutron [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1371.471799] env[68285]: DEBUG oslo_vmware.api [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892647, 'name': PowerOffVM_Task, 'duration_secs': 0.272708} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.472081] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1371.472285] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1371.472539] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c4410c1-867c-4c91-bb62-8434c68e8f40 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.535174] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1371.535415] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1371.535610] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleting the datastore file [datastore2] d0f6ab86-e18d-42ac-bcf3-94eafb1939ff {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1371.535875] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cda444ec-02b3-4e19-be66-6b2c3b0697c1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.542278] env[68285]: DEBUG oslo_vmware.api [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for the task: (returnval){ [ 1371.542278] env[68285]: value = "task-2892649" [ 1371.542278] env[68285]: _type = "Task" [ 1371.542278] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.550057] env[68285]: DEBUG oslo_vmware.api [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892649, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.688039] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5455afe2-c32f-4eee-8a22-0592230614a1 tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "6f6037bf-5527-4391-857b-47bc68fb04fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.227s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1371.793306] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef43e4d8-e493-4ef1-a2e9-d9ae7f1e1de0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.800760] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6f3544-bef4-4f31-a90e-b379b1c8674d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.830563] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81b6c78-8585-4b19-81d3-3bfdefbf1592 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.837740] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4c7164-57bc-4d08-b1ee-32290e40a94c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.852700] env[68285]: DEBUG nova.compute.provider_tree [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1371.925902] env[68285]: WARNING nova.network.neutron [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d already exists in list: networks containing: ['c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d']. ignoring it [ 1371.926143] env[68285]: WARNING nova.network.neutron [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d already exists in list: networks containing: ['c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d']. ignoring it [ 1372.051955] env[68285]: DEBUG oslo_vmware.api [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Task: {'id': task-2892649, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.119666} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.054385] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1372.054580] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1372.054753] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1372.054947] env[68285]: INFO nova.compute.manager [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1372.055209] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1372.055407] env[68285]: DEBUG nova.compute.manager [-] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1372.055501] env[68285]: DEBUG nova.network.neutron [-] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1372.093177] env[68285]: DEBUG nova.compute.manager [req-a84e200f-e965-4219-ba7c-3af1d65da114 req-9af4a8ac-b89b-4061-9610-608fdfd9723f service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Received event network-vif-plugged-ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1372.093401] env[68285]: DEBUG oslo_concurrency.lockutils [req-a84e200f-e965-4219-ba7c-3af1d65da114 req-9af4a8ac-b89b-4061-9610-608fdfd9723f service nova] Acquiring lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1372.093604] env[68285]: DEBUG oslo_concurrency.lockutils [req-a84e200f-e965-4219-ba7c-3af1d65da114 req-9af4a8ac-b89b-4061-9610-608fdfd9723f service nova] Lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1372.093765] env[68285]: DEBUG oslo_concurrency.lockutils [req-a84e200f-e965-4219-ba7c-3af1d65da114 req-9af4a8ac-b89b-4061-9610-608fdfd9723f service nova] Lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1372.093934] env[68285]: DEBUG nova.compute.manager [req-a84e200f-e965-4219-ba7c-3af1d65da114 req-9af4a8ac-b89b-4061-9610-608fdfd9723f service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] No waiting events found dispatching network-vif-plugged-ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1372.094124] env[68285]: WARNING nova.compute.manager [req-a84e200f-e965-4219-ba7c-3af1d65da114 req-9af4a8ac-b89b-4061-9610-608fdfd9723f service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Received unexpected event network-vif-plugged-ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9 for instance with vm_state active and task_state None. [ 1372.094286] env[68285]: DEBUG nova.compute.manager [req-a84e200f-e965-4219-ba7c-3af1d65da114 req-9af4a8ac-b89b-4061-9610-608fdfd9723f service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Received event network-changed-ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1372.094531] env[68285]: DEBUG nova.compute.manager [req-a84e200f-e965-4219-ba7c-3af1d65da114 req-9af4a8ac-b89b-4061-9610-608fdfd9723f service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Refreshing instance network info cache due to event network-changed-ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1372.094595] env[68285]: DEBUG oslo_concurrency.lockutils [req-a84e200f-e965-4219-ba7c-3af1d65da114 req-9af4a8ac-b89b-4061-9610-608fdfd9723f service nova] Acquiring lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.207378] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "c341075b-9d30-45db-9d83-f196bf90ecd3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1372.207615] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "c341075b-9d30-45db-9d83-f196bf90ecd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1372.207829] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "c341075b-9d30-45db-9d83-f196bf90ecd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1372.208093] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "c341075b-9d30-45db-9d83-f196bf90ecd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1372.208308] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "c341075b-9d30-45db-9d83-f196bf90ecd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1372.210455] env[68285]: INFO nova.compute.manager [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Terminating instance [ 1372.311146] env[68285]: DEBUG nova.compute.manager [req-62723739-28fe-4dc1-bde9-20a0a88efc8c req-51acb954-a3ca-4bdf-8943-c43c17595910 service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Received event network-vif-deleted-655ee17d-c9b8-43d9-b783-8c0a559a8300 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1372.311328] env[68285]: INFO nova.compute.manager [req-62723739-28fe-4dc1-bde9-20a0a88efc8c req-51acb954-a3ca-4bdf-8943-c43c17595910 service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Neutron deleted 
interface 655ee17d-c9b8-43d9-b783-8c0a559a8300; detaching it from the instance and deleting it from the info cache [ 1372.311489] env[68285]: DEBUG nova.network.neutron [req-62723739-28fe-4dc1-bde9-20a0a88efc8c req-51acb954-a3ca-4bdf-8943-c43c17595910 service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.345272] env[68285]: DEBUG nova.network.neutron [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updating instance_info_cache with network_info: [{"id": "efe1cc65-a9a1-4768-81db-53da716df13a", "address": "fa:16:3e:91:d9:2c", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefe1cc65-a9", "ovs_interfaceid": "efe1cc65-a9a1-4768-81db-53da716df13a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b7625de4-8596-45da-a5c4-db92e344c774", "address": "fa:16:3e:eb:36:2e", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7625de4-85", "ovs_interfaceid": "b7625de4-8596-45da-a5c4-db92e344c774", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9", "address": "fa:16:3e:3d:dd:f5", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddaa0bce-71", "ovs_interfaceid": "ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.356303] env[68285]: DEBUG nova.scheduler.client.report [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1372.469833] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "852ab501-00a6-442b-804a-1bbf49a2be8c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1372.470188] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1372.714413] env[68285]: DEBUG nova.compute.manager [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1372.714634] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1372.715564] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c76ce6c-52d9-4074-a7fa-2be9f0cecf3f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.723386] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1372.723597] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63d7a797-e356-409c-8562-cb60a5948639 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.729927] env[68285]: DEBUG oslo_vmware.api [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1372.729927] env[68285]: value = "task-2892650" [ 1372.729927] env[68285]: _type = "Task" [ 1372.729927] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.737262] env[68285]: DEBUG oslo_vmware.api [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892650, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.794188] env[68285]: DEBUG nova.network.neutron [-] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.813875] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-18a24320-1bd5-4c83-8d25-497ebe3f4176 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.825517] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904438b7-77cd-4918-b84a-39c3fa83280c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.854729] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1372.855421] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.855585] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1372.855891] env[68285]: DEBUG nova.compute.manager [req-62723739-28fe-4dc1-bde9-20a0a88efc8c req-51acb954-a3ca-4bdf-8943-c43c17595910 service nova] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Detach interface failed, port_id=655ee17d-c9b8-43d9-b783-8c0a559a8300, reason: Instance d0f6ab86-e18d-42ac-bcf3-94eafb1939ff could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1372.856276] env[68285]: DEBUG oslo_concurrency.lockutils [req-a84e200f-e965-4219-ba7c-3af1d65da114 req-9af4a8ac-b89b-4061-9610-608fdfd9723f service nova] Acquired lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1372.856455] env[68285]: DEBUG nova.network.neutron [req-a84e200f-e965-4219-ba7c-3af1d65da114 req-9af4a8ac-b89b-4061-9610-608fdfd9723f service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Refreshing network info cache for port ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1372.858027] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf05790c-bee6-4774-928a-52294be51c98 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.861084] env[68285]: DEBUG oslo_concurrency.lockutils [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.700s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1372.863582] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.502s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1372.863678] env[68285]: DEBUG nova.objects.instance [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'pci_requests' on Instance uuid e3117ede-5d88-4e47-a32f-ea91b1ba83ec {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1372.879330] env[68285]: DEBUG nova.virt.hardware [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1372.879565] env[68285]: DEBUG nova.virt.hardware [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1372.879706] env[68285]: DEBUG nova.virt.hardware [None 
req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1372.879886] env[68285]: DEBUG nova.virt.hardware [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1372.880036] env[68285]: DEBUG nova.virt.hardware [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1372.880188] env[68285]: DEBUG nova.virt.hardware [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1372.880391] env[68285]: DEBUG nova.virt.hardware [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1372.880548] env[68285]: DEBUG nova.virt.hardware [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1372.880711] env[68285]: DEBUG nova.virt.hardware [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1372.880871] env[68285]: DEBUG nova.virt.hardware [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1372.881060] env[68285]: DEBUG nova.virt.hardware [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1372.887494] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Reconfiguring VM to attach interface {{(pid=68285) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1372.888601] env[68285]: INFO nova.scheduler.client.report [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 
tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Deleted allocations for instance 1d55a520-481f-4a47-bb06-9e794f9347a7 [ 1372.890483] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d875a652-debe-4e45-b39f-6d7dce4687d1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.909838] env[68285]: DEBUG oslo_vmware.api [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1372.909838] env[68285]: value = "task-2892651" [ 1372.909838] env[68285]: _type = "Task" [ 1372.909838] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.918160] env[68285]: DEBUG oslo_vmware.api [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892651, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.976107] env[68285]: INFO nova.compute.manager [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Detaching volume 6bb48a0e-a10d-4be0-a276-644bf6d1632d [ 1373.013138] env[68285]: INFO nova.virt.block_device [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Attempting to driver detach volume 6bb48a0e-a10d-4be0-a276-644bf6d1632d from mountpoint /dev/sdb [ 1373.013393] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Volume detach. 
Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1373.013587] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581086', 'volume_id': '6bb48a0e-a10d-4be0-a276-644bf6d1632d', 'name': 'volume-6bb48a0e-a10d-4be0-a276-644bf6d1632d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '852ab501-00a6-442b-804a-1bbf49a2be8c', 'attached_at': '', 'detached_at': '', 'volume_id': '6bb48a0e-a10d-4be0-a276-644bf6d1632d', 'serial': '6bb48a0e-a10d-4be0-a276-644bf6d1632d'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1373.014855] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f98dfb-c143-4ea1-96ab-a0f6463cf6eb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.036511] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10b0dcc-10dc-43dd-8fe9-9943964393cc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.043645] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083ae2fe-ff8a-4cc3-b5f7-00c052f1e296 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.067722] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bfdae56-95fe-4cf8-b8d8-c26c5a747927 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.082737] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] The volume has not been displaced from its original location: [datastore2] volume-6bb48a0e-a10d-4be0-a276-644bf6d1632d/volume-6bb48a0e-a10d-4be0-a276-644bf6d1632d.vmdk. No consolidation needed. 
{{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1373.087859] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1373.088162] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73d18f1c-ecda-4477-8bc2-3213f5a23742 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.105732] env[68285]: DEBUG oslo_vmware.api [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1373.105732] env[68285]: value = "task-2892652" [ 1373.105732] env[68285]: _type = "Task" [ 1373.105732] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.113563] env[68285]: DEBUG oslo_vmware.api [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892652, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.240185] env[68285]: DEBUG oslo_vmware.api [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892650, 'name': PowerOffVM_Task, 'duration_secs': 0.252279} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.240492] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1373.240713] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1373.241020] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5dd12c11-44bd-4b9a-a0a7-551d8af2fac0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.296509] env[68285]: INFO nova.compute.manager [-] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Took 1.24 seconds to deallocate network for instance. 
[ 1373.332027] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1373.332027] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1373.332027] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Deleting the datastore file [datastore2] c341075b-9d30-45db-9d83-f196bf90ecd3 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1373.332027] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96442176-9234-40b0-9d5c-199b1e02dd5b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.338988] env[68285]: DEBUG oslo_vmware.api [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for the task: (returnval){ [ 1373.338988] env[68285]: value = "task-2892654" [ 1373.338988] env[68285]: _type = "Task" [ 1373.338988] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.347148] env[68285]: DEBUG oslo_vmware.api [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892654, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.367195] env[68285]: DEBUG nova.objects.instance [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'numa_topology' on Instance uuid e3117ede-5d88-4e47-a32f-ea91b1ba83ec {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1373.409084] env[68285]: DEBUG oslo_concurrency.lockutils [None req-92b698ed-bec1-49ca-ba47-0ef1ee37d7eb tempest-ServersNegativeTestMultiTenantJSON-1664311986 tempest-ServersNegativeTestMultiTenantJSON-1664311986-project-member] Lock "1d55a520-481f-4a47-bb06-9e794f9347a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.864s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1373.423846] env[68285]: DEBUG oslo_vmware.api [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892651, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.602598] env[68285]: DEBUG nova.network.neutron [req-a84e200f-e965-4219-ba7c-3af1d65da114 req-9af4a8ac-b89b-4061-9610-608fdfd9723f service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updated VIF entry in instance network info cache for port ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1373.602891] env[68285]: DEBUG nova.network.neutron [req-a84e200f-e965-4219-ba7c-3af1d65da114 req-9af4a8ac-b89b-4061-9610-608fdfd9723f service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updating instance_info_cache with network_info: [{"id": "efe1cc65-a9a1-4768-81db-53da716df13a", "address": "fa:16:3e:91:d9:2c", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefe1cc65-a9", "ovs_interfaceid": "efe1cc65-a9a1-4768-81db-53da716df13a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b7625de4-8596-45da-a5c4-db92e344c774", "address": "fa:16:3e:eb:36:2e", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7625de4-85", "ovs_interfaceid": "b7625de4-8596-45da-a5c4-db92e344c774", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9", "address": "fa:16:3e:3d:dd:f5", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddaa0bce-71", "ovs_interfaceid": "ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.616769] env[68285]: DEBUG oslo_vmware.api [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892652, 'name': ReconfigVM_Task, 'duration_secs': 0.234027} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.617039] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1373.621926] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-238d7843-a62e-4012-95a9-36dd940676f6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.637737] env[68285]: DEBUG oslo_vmware.api [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1373.637737] env[68285]: value = "task-2892655" [ 1373.637737] env[68285]: _type = "Task" [ 1373.637737] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.646628] env[68285]: DEBUG oslo_vmware.api [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892655, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.802766] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1373.849083] env[68285]: DEBUG oslo_vmware.api [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Task: {'id': task-2892654, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208147} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.849343] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1373.849529] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1373.849708] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1373.849879] env[68285]: INFO nova.compute.manager [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1373.850129] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1373.850320] env[68285]: DEBUG nova.compute.manager [-] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1373.850415] env[68285]: DEBUG nova.network.neutron [-] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1373.869618] env[68285]: INFO nova.compute.claims [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1373.921352] env[68285]: DEBUG oslo_vmware.api [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892651, 'name': ReconfigVM_Task, 'duration_secs': 0.564353} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.921823] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1373.922042] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Reconfigured VM to attach interface {{(pid=68285) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1374.106082] env[68285]: DEBUG oslo_concurrency.lockutils [req-a84e200f-e965-4219-ba7c-3af1d65da114 req-9af4a8ac-b89b-4061-9610-608fdfd9723f service nova] Releasing lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1374.148494] env[68285]: DEBUG oslo_vmware.api [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892655, 'name': ReconfigVM_Task, 'duration_secs': 0.152122} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.148793] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581086', 'volume_id': '6bb48a0e-a10d-4be0-a276-644bf6d1632d', 'name': 'volume-6bb48a0e-a10d-4be0-a276-644bf6d1632d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '852ab501-00a6-442b-804a-1bbf49a2be8c', 'attached_at': '', 'detached_at': '', 'volume_id': '6bb48a0e-a10d-4be0-a276-644bf6d1632d', 'serial': '6bb48a0e-a10d-4be0-a276-644bf6d1632d'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1374.365992] env[68285]: DEBUG nova.compute.manager [req-c9e0f6b1-5a87-40b6-b12a-31f98b13bc5c req-a43b0a49-2e01-4ca3-acab-848aa6f4dcd3 service nova] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Received event network-vif-deleted-e08da820-30b8-48ec-b099-d1f963c95d5e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1374.365992] env[68285]: INFO nova.compute.manager [req-c9e0f6b1-5a87-40b6-b12a-31f98b13bc5c req-a43b0a49-2e01-4ca3-acab-848aa6f4dcd3 service nova] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Neutron deleted interface e08da820-30b8-48ec-b099-d1f963c95d5e; detaching it from the instance and deleting it from the info cache [ 1374.365992] env[68285]: DEBUG nova.network.neutron [req-c9e0f6b1-5a87-40b6-b12a-31f98b13bc5c req-a43b0a49-2e01-4ca3-acab-848aa6f4dcd3 service nova] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1374.426582] env[68285]: DEBUG oslo_concurrency.lockutils [None req-72b98d4a-f0c4-4b3a-bcbf-5bead5d39455 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-9ddeb48e-ef72-4e6e-9058-d45ebde7583e-ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.156s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1374.790937] env[68285]: DEBUG nova.objects.instance [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lazy-loading 'flavor' on Instance uuid 852ab501-00a6-442b-804a-1bbf49a2be8c {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1374.829098] env[68285]: DEBUG nova.network.neutron [-] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1374.868131] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-329425bc-f400-438a-bd0a-47740dd9ffab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.881300] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0d5d34d8-9024-49dc-a4cb-3b4407b469e8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.910538] env[68285]: DEBUG nova.compute.manager [req-c9e0f6b1-5a87-40b6-b12a-31f98b13bc5c req-a43b0a49-2e01-4ca3-acab-848aa6f4dcd3 service nova] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Detach interface failed, port_id=e08da820-30b8-48ec-b099-d1f963c95d5e, reason: Instance c341075b-9d30-45db-9d83-f196bf90ecd3 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1375.007096] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574beb20-e7ac-4463-a088-90436a7adfad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.015414] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488d098e-7931-4ea8-8263-d1c24c6aa137 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.047091] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b8d541-6ebe-4939-8741-e408563c19db {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.054914] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de61d07-7188-4886-84f8-f08cf29e4e21 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.068060] env[68285]: DEBUG nova.compute.provider_tree [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1375.333308] env[68285]: INFO nova.compute.manager [-] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Took 1.48 seconds to deallocate network for instance. 
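The wait_for_task / _poll_task entries above follow a simple submit-then-poll pattern: a vSphere task (ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task) is created, and its state is polled until it reports success or error, logging "progress is N%" in between. A minimal sketch of that loop, assuming a hypothetical get_task_info callable rather than the real oslo_vmware API:

import time

def wait_for_task(get_task_info, poll_interval=0.5):
    # get_task_info is a hypothetical callable returning an object with
    # .state, .progress and .error, mirroring a vSphere TaskInfo; the
    # actual driver goes through oslo_vmware instead of this helper.
    while True:
        info = get_task_info()
        if info.state == "success":
            return info                        # "completed successfully"
        if info.state == "error":
            raise RuntimeError("task failed: %s" % info.error)
        # still queued/running: report progress, as the "progress is N%"
        # lines in the log do, then poll again
        print("progress is %s%%" % (info.progress or 0))
        time.sleep(poll_interval)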
[ 1375.571847] env[68285]: DEBUG nova.scheduler.client.report [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1375.800759] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b66f5127-5743-4631-96f7-7a93cf92d86b tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.330s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1375.843962] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.079483] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.216s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1376.082021] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.279s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.082258] env[68285]: DEBUG nova.objects.instance [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lazy-loading 'resources' on Instance uuid d0f6ab86-e18d-42ac-bcf3-94eafb1939ff {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1376.126095] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "interface-9ddeb48e-ef72-4e6e-9058-d45ebde7583e-b7625de4-8596-45da-a5c4-db92e344c774" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.126525] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] 
Lock "interface-9ddeb48e-ef72-4e6e-9058-d45ebde7583e-b7625de4-8596-45da-a5c4-db92e344c774" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.128337] env[68285]: DEBUG oslo_concurrency.lockutils [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "852ab501-00a6-442b-804a-1bbf49a2be8c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.128638] env[68285]: DEBUG oslo_concurrency.lockutils [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.128912] env[68285]: DEBUG oslo_concurrency.lockutils [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "852ab501-00a6-442b-804a-1bbf49a2be8c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.129188] env[68285]: DEBUG oslo_concurrency.lockutils [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.129417] env[68285]: DEBUG oslo_concurrency.lockutils [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1376.132345] env[68285]: INFO nova.network.neutron [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Updating port 10199287-9009-48cc-b97a-e94229f7d640 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1376.136037] env[68285]: INFO nova.compute.manager [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Terminating instance [ 1376.632743] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock 
"9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.633022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1376.633946] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24bacd37-432c-47b2-93cc-f2a8c787994d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.641170] env[68285]: DEBUG nova.compute.manager [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1376.641170] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1376.641879] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1301e17-cee8-461b-83e3-2abf33e91178 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.659859] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a2e2e2-2e32-41d4-abdc-ca9ff270aa68 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.664429] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1376.665021] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13f4579c-1a55-47b9-9f15-da2c15cf2bb7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.690603] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Reconfiguring VM to detach interface {{(pid=68285) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1376.694717] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89cc8550-2ba5-4522-8f37-7807a2e485f9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.707662] env[68285]: DEBUG oslo_vmware.api [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 
tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1376.707662] env[68285]: value = "task-2892656" [ 1376.707662] env[68285]: _type = "Task" [ 1376.707662] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.714552] env[68285]: DEBUG oslo_vmware.api [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1376.714552] env[68285]: value = "task-2892657" [ 1376.714552] env[68285]: _type = "Task" [ 1376.714552] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.717569] env[68285]: DEBUG oslo_vmware.api [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892656, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.725780] env[68285]: DEBUG oslo_vmware.api [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892657, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.781753] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d42f79-1433-4ba8-ab1f-0ff42e16f8f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.791298] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25918c07-f264-41a5-891d-b07a60d788c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.823585] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4901db3f-56eb-456c-a966-3f578f49101c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.831201] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f18f00e-9f9d-45c3-bbda-c133d1e18b0c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.844773] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.845029] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1376.845239] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.845425] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.845635] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1376.847508] env[68285]: DEBUG nova.compute.provider_tree [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1376.849352] env[68285]: INFO nova.compute.manager [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Terminating instance [ 1377.218242] env[68285]: DEBUG oslo_vmware.api [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892656, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.225911] env[68285]: DEBUG oslo_vmware.api [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892657, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.352277] env[68285]: DEBUG nova.scheduler.client.report [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1377.355759] env[68285]: DEBUG nova.compute.manager [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1377.355958] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1377.356791] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345f7de9-c566-4aae-8217-843c8c335c12 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.364454] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1377.364944] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-096d8f8e-6950-456e-8dc2-0ff0a457d8ba {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.370920] env[68285]: DEBUG oslo_vmware.api [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1377.370920] env[68285]: value = "task-2892658" [ 1377.370920] env[68285]: _type = "Task" [ 1377.370920] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.378564] env[68285]: DEBUG oslo_vmware.api [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892658, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.669391] env[68285]: DEBUG nova.compute.manager [req-b900fe46-18d5-429f-b026-e906d6434dcf req-d0bf7b50-8f90-4762-acf8-e838563b007b service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Received event network-vif-plugged-10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1377.669545] env[68285]: DEBUG oslo_concurrency.lockutils [req-b900fe46-18d5-429f-b026-e906d6434dcf req-d0bf7b50-8f90-4762-acf8-e838563b007b service nova] Acquiring lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1377.669750] env[68285]: DEBUG oslo_concurrency.lockutils [req-b900fe46-18d5-429f-b026-e906d6434dcf req-d0bf7b50-8f90-4762-acf8-e838563b007b service nova] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1377.669930] env[68285]: DEBUG oslo_concurrency.lockutils [req-b900fe46-18d5-429f-b026-e906d6434dcf req-d0bf7b50-8f90-4762-acf8-e838563b007b service nova] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1377.670127] env[68285]: DEBUG nova.compute.manager [req-b900fe46-18d5-429f-b026-e906d6434dcf req-d0bf7b50-8f90-4762-acf8-e838563b007b service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] No waiting events found dispatching network-vif-plugged-10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1377.670307] env[68285]: WARNING nova.compute.manager [req-b900fe46-18d5-429f-b026-e906d6434dcf req-d0bf7b50-8f90-4762-acf8-e838563b007b service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Received unexpected event network-vif-plugged-10199287-9009-48cc-b97a-e94229f7d640 for instance with vm_state shelved_offloaded and task_state spawning. [ 1377.719471] env[68285]: DEBUG oslo_vmware.api [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892656, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.727644] env[68285]: DEBUG oslo_vmware.api [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892657, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.764548] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.764548] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1377.764548] env[68285]: DEBUG nova.network.neutron [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1377.861684] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.780s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1377.864060] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.020s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1377.864381] env[68285]: DEBUG nova.objects.instance [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lazy-loading 'resources' on Instance uuid c341075b-9d30-45db-9d83-f196bf90ecd3 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1377.881519] env[68285]: DEBUG oslo_vmware.api [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892658, 'name': PowerOffVM_Task, 'duration_secs': 0.378934} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.881819] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1377.882037] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1377.882306] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-280691b2-65dd-4ee5-b233-b8cd70399129 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.884542] env[68285]: INFO nova.scheduler.client.report [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Deleted allocations for instance d0f6ab86-e18d-42ac-bcf3-94eafb1939ff [ 1378.219873] env[68285]: DEBUG oslo_vmware.api [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892656, 'name': PowerOffVM_Task, 'duration_secs': 1.226858} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.222882] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1378.223126] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1378.223374] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dfc88759-741a-4355-a9e1-65fe99b43b41 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.229538] env[68285]: DEBUG oslo_vmware.api [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892657, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.358878] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1378.359103] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1378.359295] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleting the datastore file [datastore1] a1dc8c86-523f-4474-9fea-9ccf35a36b3f {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1378.359565] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1378.359845] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1378.360028] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Deleting the datastore file [datastore1] 852ab501-00a6-442b-804a-1bbf49a2be8c {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1378.360268] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5724dec-1d64-48a2-bc4c-b3555b778403 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.362099] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-491d1726-bc48-471c-ae9a-8c6f81d6e7a2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.370137] env[68285]: DEBUG oslo_vmware.api [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1378.370137] env[68285]: value = "task-2892662" [ 1378.370137] env[68285]: _type = "Task" [ 1378.370137] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.371351] env[68285]: DEBUG oslo_vmware.api [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1378.371351] env[68285]: value = "task-2892661" [ 1378.371351] env[68285]: _type = "Task" [ 1378.371351] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.382475] env[68285]: DEBUG oslo_vmware.api [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892661, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.385372] env[68285]: DEBUG oslo_vmware.api [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892662, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.391892] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cb5f75fa-3dec-47b1-bc60-e4ca04a137e6 tempest-ServerActionsTestOtherA-1742002793 tempest-ServerActionsTestOtherA-1742002793-project-member] Lock "d0f6ab86-e18d-42ac-bcf3-94eafb1939ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.952s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1378.497545] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267d9034-cd80-4192-adf9-4d1bad40b1c9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.505598] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c34f4a-db2a-476b-8ce0-da3c5ca4a6e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.538504] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b11b04-2a2c-485f-8d86-6abb8a249144 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.546765] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d950fdd5-92ed-42fb-a6de-24be40925325 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.561051] env[68285]: DEBUG nova.compute.provider_tree [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1378.724829] env[68285]: DEBUG nova.network.neutron [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Updating instance_info_cache with 
network_info: [{"id": "10199287-9009-48cc-b97a-e94229f7d640", "address": "fa:16:3e:46:49:64", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10199287-90", "ovs_interfaceid": "10199287-9009-48cc-b97a-e94229f7d640", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.731464] env[68285]: DEBUG oslo_vmware.api [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892657, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.886146] env[68285]: DEBUG oslo_vmware.api [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892661, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161323} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.889116] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1378.889316] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1378.889493] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1378.889667] env[68285]: INFO nova.compute.manager [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Took 1.53 seconds to destroy the instance on the hypervisor. 
[ 1378.889902] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1378.890122] env[68285]: DEBUG oslo_vmware.api [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892662, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171344} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.890322] env[68285]: DEBUG nova.compute.manager [-] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1378.890421] env[68285]: DEBUG nova.network.neutron [-] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1378.891986] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1378.892177] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1378.892352] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1378.892516] env[68285]: INFO nova.compute.manager [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Took 2.25 seconds to destroy the instance on the hypervisor. [ 1378.892734] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1378.892911] env[68285]: DEBUG nova.compute.manager [-] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1378.893015] env[68285]: DEBUG nova.network.neutron [-] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1379.066678] env[68285]: DEBUG nova.scheduler.client.report [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1379.233583] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1379.233583] env[68285]: DEBUG oslo_vmware.api [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892657, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.264221] env[68285]: DEBUG nova.virt.hardware [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='a57b608e1116452c9d8fc17dbc6a9afd',container_format='bare',created_at=2025-03-10T16:00:24Z,direct_url=,disk_format='vmdk',id=771ad50d-8fe8-4388-9936-92056e5c4163,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-402970141-shelved',owner='7106da1f6bcb4d0cb3dcad984b3adb33',properties=ImageMetaProps,protected=,size=31665664,status='active',tags=,updated_at=2025-03-10T16:00:38Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1379.264491] env[68285]: DEBUG nova.virt.hardware [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1379.264651] env[68285]: DEBUG nova.virt.hardware [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1379.264833] env[68285]: DEBUG nova.virt.hardware [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1379.264976] env[68285]: DEBUG nova.virt.hardware [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1379.265806] env[68285]: DEBUG nova.virt.hardware [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1379.266439] env[68285]: DEBUG nova.virt.hardware [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1379.267061] env[68285]: DEBUG nova.virt.hardware [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1379.267061] 
env[68285]: DEBUG nova.virt.hardware [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1379.267061] env[68285]: DEBUG nova.virt.hardware [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1379.267211] env[68285]: DEBUG nova.virt.hardware [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1379.268063] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e990db8d-d103-45ae-b107-539fb515ca49 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.276911] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ebbb6c-3b68-4737-86d7-664cb3817554 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.296543] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:49:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10199287-9009-48cc-b97a-e94229f7d640', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1379.305389] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1379.305964] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1379.305964] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12d53a31-e015-4f09-acec-8cc734df1288 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.325897] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1379.325897] env[68285]: value = "task-2892663" [ 1379.325897] env[68285]: _type = "Task" [ 1379.325897] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.335597] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892663, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.432327] env[68285]: DEBUG nova.compute.manager [req-5b39b8d0-a207-4d26-86aa-17e4f202c725 req-c1269a36-c422-4dde-a02e-fbf66c888a16 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Received event network-vif-deleted-f4b82f26-eff6-4869-af1c-0bc1a3a4d606 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1379.432961] env[68285]: INFO nova.compute.manager [req-5b39b8d0-a207-4d26-86aa-17e4f202c725 req-c1269a36-c422-4dde-a02e-fbf66c888a16 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Neutron deleted interface f4b82f26-eff6-4869-af1c-0bc1a3a4d606; detaching it from the instance and deleting it from the info cache [ 1379.432961] env[68285]: DEBUG nova.network.neutron [req-5b39b8d0-a207-4d26-86aa-17e4f202c725 req-c1269a36-c422-4dde-a02e-fbf66c888a16 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.576922] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.713s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1379.601306] env[68285]: INFO nova.scheduler.client.report [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Deleted allocations for instance c341075b-9d30-45db-9d83-f196bf90ecd3 [ 1379.703730] env[68285]: DEBUG nova.compute.manager [req-673470d9-75ff-4978-80d7-378676cb360c req-7a9df05e-60ac-47c9-a2ae-2580fc7f5ade service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Received event network-changed-10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1379.703730] env[68285]: DEBUG nova.compute.manager [req-673470d9-75ff-4978-80d7-378676cb360c req-7a9df05e-60ac-47c9-a2ae-2580fc7f5ade service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Refreshing instance network info cache due to event network-changed-10199287-9009-48cc-b97a-e94229f7d640. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1379.703730] env[68285]: DEBUG oslo_concurrency.lockutils [req-673470d9-75ff-4978-80d7-378676cb360c req-7a9df05e-60ac-47c9-a2ae-2580fc7f5ade service nova] Acquiring lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.703730] env[68285]: DEBUG oslo_concurrency.lockutils [req-673470d9-75ff-4978-80d7-378676cb360c req-7a9df05e-60ac-47c9-a2ae-2580fc7f5ade service nova] Acquired lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1379.703730] env[68285]: DEBUG nova.network.neutron [req-673470d9-75ff-4978-80d7-378676cb360c req-7a9df05e-60ac-47c9-a2ae-2580fc7f5ade service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Refreshing network info cache for port 10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1379.731227] env[68285]: DEBUG oslo_vmware.api [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892657, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.836505] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892663, 'name': CreateVM_Task, 'duration_secs': 0.321714} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.836683] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1379.837382] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/771ad50d-8fe8-4388-9936-92056e5c4163" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.837548] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/771ad50d-8fe8-4388-9936-92056e5c4163" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1379.837936] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/771ad50d-8fe8-4388-9936-92056e5c4163" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1379.838206] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a4af60d-235d-4981-bc6a-21d793d4204c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.843161] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b 
tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1379.843161] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]529425f9-7dd0-7945-292b-6e393692a87d" [ 1379.843161] env[68285]: _type = "Task" [ 1379.843161] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.851676] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]529425f9-7dd0-7945-292b-6e393692a87d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.904272] env[68285]: DEBUG nova.network.neutron [-] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.912164] env[68285]: DEBUG nova.network.neutron [-] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.936290] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29eb0546-faec-49c1-8cf1-828088afb1a2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.947617] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fdab4a-4b84-4bdd-a25b-881fd109a209 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.980097] env[68285]: DEBUG nova.compute.manager [req-5b39b8d0-a207-4d26-86aa-17e4f202c725 req-c1269a36-c422-4dde-a02e-fbf66c888a16 service nova] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Detach interface failed, port_id=f4b82f26-eff6-4869-af1c-0bc1a3a4d606, reason: Instance 852ab501-00a6-442b-804a-1bbf49a2be8c could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1380.111452] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d7076365-789e-4611-9b9b-64032ffe249b tempest-ServerRescueTestJSON-1302412218 tempest-ServerRescueTestJSON-1302412218-project-member] Lock "c341075b-9d30-45db-9d83-f196bf90ecd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.904s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1380.232652] env[68285]: DEBUG oslo_vmware.api [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892657, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.356128] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/771ad50d-8fe8-4388-9936-92056e5c4163" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1380.356391] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Processing image 771ad50d-8fe8-4388-9936-92056e5c4163 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1380.356635] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/771ad50d-8fe8-4388-9936-92056e5c4163/771ad50d-8fe8-4388-9936-92056e5c4163.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.356784] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquired lock "[datastore1] devstack-image-cache_base/771ad50d-8fe8-4388-9936-92056e5c4163/771ad50d-8fe8-4388-9936-92056e5c4163.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1380.356975] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1380.357262] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74b93c76-35ca-4c44-b96e-a2a514e185b5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.366423] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1380.366688] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1380.367397] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f8de961-c626-482c-84a9-0babfc1ff127 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.374487] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1380.374487] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5229a090-cdf4-bf97-14cc-c6b92a652921" [ 1380.374487] env[68285]: _type = "Task" [ 1380.374487] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.384894] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5229a090-cdf4-bf97-14cc-c6b92a652921, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.408232] env[68285]: INFO nova.compute.manager [-] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Took 1.52 seconds to deallocate network for instance. [ 1380.415256] env[68285]: INFO nova.compute.manager [-] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Took 1.52 seconds to deallocate network for instance. [ 1380.624237] env[68285]: DEBUG nova.network.neutron [req-673470d9-75ff-4978-80d7-378676cb360c req-7a9df05e-60ac-47c9-a2ae-2580fc7f5ade service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Updated VIF entry in instance network info cache for port 10199287-9009-48cc-b97a-e94229f7d640. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1380.624719] env[68285]: DEBUG nova.network.neutron [req-673470d9-75ff-4978-80d7-378676cb360c req-7a9df05e-60ac-47c9-a2ae-2580fc7f5ade service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Updating instance_info_cache with network_info: [{"id": "10199287-9009-48cc-b97a-e94229f7d640", "address": "fa:16:3e:46:49:64", "network": {"id": "d1dca9bb-0362-4f7a-b507-f61ba3d1e31c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1501514314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7106da1f6bcb4d0cb3dcad984b3adb33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10199287-90", "ovs_interfaceid": "10199287-9009-48cc-b97a-e94229f7d640", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.733761] env[68285]: DEBUG oslo_vmware.api [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892657, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.886011] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Preparing fetch location {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1380.886185] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Fetch image to [datastore1] OSTACK_IMG_698df1df-487a-4e8b-a883-fc8fb3fbf21b/OSTACK_IMG_698df1df-487a-4e8b-a883-fc8fb3fbf21b.vmdk {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1380.886399] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Downloading stream optimized image 771ad50d-8fe8-4388-9936-92056e5c4163 to [datastore1] OSTACK_IMG_698df1df-487a-4e8b-a883-fc8fb3fbf21b/OSTACK_IMG_698df1df-487a-4e8b-a883-fc8fb3fbf21b.vmdk on the data store datastore1 as vApp {{(pid=68285) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1380.886580] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Downloading image file data 771ad50d-8fe8-4388-9936-92056e5c4163 to the ESX as VM named 'OSTACK_IMG_698df1df-487a-4e8b-a883-fc8fb3fbf21b' {{(pid=68285) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1380.930624] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1380.931806] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1380.932086] env[68285]: DEBUG nova.objects.instance [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lazy-loading 'resources' on Instance uuid a1dc8c86-523f-4474-9fea-9ccf35a36b3f {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1380.933704] env[68285]: DEBUG oslo_concurrency.lockutils [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1380.971366] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1380.971366] env[68285]: value = "resgroup-9" [ 1380.971366] env[68285]: _type = "ResourcePool" [ 1380.971366] env[68285]: }. {{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1380.971703] env[68285]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a0f0519e-9e61-41ce-85d0-42676d2641df {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.994196] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lease: (returnval){ [ 1380.994196] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524ad693-1a1b-3c0e-536b-8d2ace57b6f8" [ 1380.994196] env[68285]: _type = "HttpNfcLease" [ 1380.994196] env[68285]: } obtained for vApp import into resource pool (val){ [ 1380.994196] env[68285]: value = "resgroup-9" [ 1380.994196] env[68285]: _type = "ResourcePool" [ 1380.994196] env[68285]: }. {{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1380.994196] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the lease: (returnval){ [ 1380.994196] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524ad693-1a1b-3c0e-536b-8d2ace57b6f8" [ 1380.994196] env[68285]: _type = "HttpNfcLease" [ 1380.994196] env[68285]: } to be ready. {{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1381.001382] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1381.001382] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524ad693-1a1b-3c0e-536b-8d2ace57b6f8" [ 1381.001382] env[68285]: _type = "HttpNfcLease" [ 1381.001382] env[68285]: } is initializing. 
{{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1381.130043] env[68285]: DEBUG oslo_concurrency.lockutils [req-673470d9-75ff-4978-80d7-378676cb360c req-7a9df05e-60ac-47c9-a2ae-2580fc7f5ade service nova] Releasing lock "refresh_cache-e3117ede-5d88-4e47-a32f-ea91b1ba83ec" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1381.130043] env[68285]: DEBUG nova.compute.manager [req-673470d9-75ff-4978-80d7-378676cb360c req-7a9df05e-60ac-47c9-a2ae-2580fc7f5ade service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Received event network-vif-deleted-07d808df-d1b1-42f4-8853-e537f5b160e0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1381.130043] env[68285]: INFO nova.compute.manager [req-673470d9-75ff-4978-80d7-378676cb360c req-7a9df05e-60ac-47c9-a2ae-2580fc7f5ade service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Neutron deleted interface 07d808df-d1b1-42f4-8853-e537f5b160e0; detaching it from the instance and deleting it from the info cache [ 1381.130043] env[68285]: DEBUG nova.network.neutron [req-673470d9-75ff-4978-80d7-378676cb360c req-7a9df05e-60ac-47c9-a2ae-2580fc7f5ade service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.235610] env[68285]: DEBUG oslo_vmware.api [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892657, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.503333] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1381.503333] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524ad693-1a1b-3c0e-536b-8d2ace57b6f8" [ 1381.503333] env[68285]: _type = "HttpNfcLease" [ 1381.503333] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1381.503653] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1381.503653] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]524ad693-1a1b-3c0e-536b-8d2ace57b6f8" [ 1381.503653] env[68285]: _type = "HttpNfcLease" [ 1381.503653] env[68285]: }. {{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1381.504463] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8512b9f1-4f35-4410-940c-f8e25758da0f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.516419] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525b623d-a248-3809-ef49-9a7e990a4ce0/disk-0.vmdk from lease info. 
{{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1381.516603] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Creating HTTP connection to write to file with size = 31665664 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525b623d-a248-3809-ef49-9a7e990a4ce0/disk-0.vmdk. {{(pid=68285) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1381.585531] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a8bf2efa-3b26-481a-86fd-38d61a3c2c01 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.605861] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112e7a7e-0c34-4fa6-a5f4-28be54d9ccf7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.612984] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae792530-0edd-43fc-9840-fc3d4f23c2c7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.644598] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3041f43d-668a-4f82-8a24-f0c7dc8907ad {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.647319] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66a0c44-2d98-414a-b58b-8fb1ea23a7cc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.656523] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205745f1-ca00-4b8f-aece-274c1a84fb46 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.663685] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bbe260-5c7a-4f02-bd82-cf48df1b458f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.683960] env[68285]: DEBUG nova.compute.provider_tree [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1381.692672] env[68285]: DEBUG nova.compute.manager [req-673470d9-75ff-4978-80d7-378676cb360c req-7a9df05e-60ac-47c9-a2ae-2580fc7f5ade service nova] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Detach interface failed, port_id=07d808df-d1b1-42f4-8853-e537f5b160e0, reason: Instance a1dc8c86-523f-4474-9fea-9ccf35a36b3f could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1381.734308] env[68285]: DEBUG oslo_vmware.api [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892657, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.194376] env[68285]: DEBUG nova.scheduler.client.report [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1382.235417] env[68285]: DEBUG oslo_vmware.api [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892657, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.701058] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.768s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1382.705762] env[68285]: DEBUG oslo_concurrency.lockutils [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.772s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1382.706058] env[68285]: DEBUG nova.objects.instance [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lazy-loading 'resources' on Instance uuid 852ab501-00a6-442b-804a-1bbf49a2be8c {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1382.719794] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Completed reading data from the image iterator. {{(pid=68285) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1382.720969] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525b623d-a248-3809-ef49-9a7e990a4ce0/disk-0.vmdk. 
{{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1382.720969] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4afb7db1-8bdb-4a71-8211-a3f03f8da86a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.730967] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525b623d-a248-3809-ef49-9a7e990a4ce0/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1382.731119] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525b623d-a248-3809-ef49-9a7e990a4ce0/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1382.732113] env[68285]: INFO nova.scheduler.client.report [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleted allocations for instance a1dc8c86-523f-4474-9fea-9ccf35a36b3f [ 1382.733414] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-40e96f77-37eb-4194-a652-c686f4021d4c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.741859] env[68285]: DEBUG oslo_vmware.api [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892657, 'name': ReconfigVM_Task, 'duration_secs': 5.758761} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.742773] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1382.743141] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Reconfigured VM to detach interface {{(pid=68285) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1382.987747] env[68285]: DEBUG oslo_vmware.rw_handles [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525b623d-a248-3809-ef49-9a7e990a4ce0/disk-0.vmdk. 
{{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1382.990114] env[68285]: INFO nova.virt.vmwareapi.images [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Downloaded image file data 771ad50d-8fe8-4388-9936-92056e5c4163 [ 1382.990114] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348f5447-eebd-4bdc-8559-1b404b80b55b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.005687] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51979ebe-27dc-4725-8677-2f8440dfa15e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.008353] env[68285]: DEBUG nova.compute.manager [req-e1851f80-1052-48c8-a389-64efd07d6d16 req-0e5c468a-9448-4b38-b5e5-15a67581b81c service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Received event network-vif-deleted-b7625de4-8596-45da-a5c4-db92e344c774 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1383.008538] env[68285]: INFO nova.compute.manager [req-e1851f80-1052-48c8-a389-64efd07d6d16 req-0e5c468a-9448-4b38-b5e5-15a67581b81c service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Neutron deleted interface b7625de4-8596-45da-a5c4-db92e344c774; detaching it from the instance and deleting it from the info cache [ 1383.008806] env[68285]: DEBUG nova.network.neutron [req-e1851f80-1052-48c8-a389-64efd07d6d16 req-0e5c468a-9448-4b38-b5e5-15a67581b81c service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updating instance_info_cache with network_info: [{"id": "efe1cc65-a9a1-4768-81db-53da716df13a", "address": "fa:16:3e:91:d9:2c", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefe1cc65-a9", "ovs_interfaceid": "efe1cc65-a9a1-4768-81db-53da716df13a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9", "address": "fa:16:3e:3d:dd:f5", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddaa0bce-71", "ovs_interfaceid": "ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.097758] env[68285]: INFO nova.virt.vmwareapi.images [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] The imported VM was unregistered [ 1383.103985] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Caching image {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1383.103985] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Creating directory with path [datastore1] devstack-image-cache_base/771ad50d-8fe8-4388-9936-92056e5c4163 {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1383.103985] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6dc8f3aa-199f-44fe-a1ca-1ab4496b182a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.128713] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Created directory with path [datastore1] devstack-image-cache_base/771ad50d-8fe8-4388-9936-92056e5c4163 {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1383.128926] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_698df1df-487a-4e8b-a883-fc8fb3fbf21b/OSTACK_IMG_698df1df-487a-4e8b-a883-fc8fb3fbf21b.vmdk to [datastore1] devstack-image-cache_base/771ad50d-8fe8-4388-9936-92056e5c4163/771ad50d-8fe8-4388-9936-92056e5c4163.vmdk. 
{{(pid=68285) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1383.129288] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-b03eab85-a5ef-4567-ad20-f561c33cdfa5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.136974] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1383.136974] env[68285]: value = "task-2892666" [ 1383.136974] env[68285]: _type = "Task" [ 1383.136974] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.144967] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892666, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.245628] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f1b56be9-19cc-4dbb-a915-63266a03ffbd tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "a1dc8c86-523f-4474-9fea-9ccf35a36b3f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.400s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1383.296269] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a9d5fb-8b3a-41f3-bf4c-2cc8c933fe7b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.303946] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ae778a-611b-454d-8006-85b95bc5b9fd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.333926] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccf9ac2-f918-40ea-bab7-335a490cdf25 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.341914] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65e558c-d633-4ffa-ba7b-835dbc63c264 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.357434] env[68285]: DEBUG nova.compute.provider_tree [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1383.511647] env[68285]: DEBUG oslo_concurrency.lockutils [req-e1851f80-1052-48c8-a389-64efd07d6d16 req-0e5c468a-9448-4b38-b5e5-15a67581b81c service nova] Acquiring lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.511840] env[68285]: DEBUG oslo_concurrency.lockutils [req-e1851f80-1052-48c8-a389-64efd07d6d16 
req-0e5c468a-9448-4b38-b5e5-15a67581b81c service nova] Acquired lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1383.513290] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270f479c-d605-4ebf-91cc-eecab1b46149 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.533660] env[68285]: DEBUG oslo_concurrency.lockutils [req-e1851f80-1052-48c8-a389-64efd07d6d16 req-0e5c468a-9448-4b38-b5e5-15a67581b81c service nova] Releasing lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1383.534915] env[68285]: WARNING nova.compute.manager [req-e1851f80-1052-48c8-a389-64efd07d6d16 req-0e5c468a-9448-4b38-b5e5-15a67581b81c service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Detach interface failed, port_id=b7625de4-8596-45da-a5c4-db92e344c774, reason: No device with interface-id b7625de4-8596-45da-a5c4-db92e344c774 exists on VM: nova.exception.NotFound: No device with interface-id b7625de4-8596-45da-a5c4-db92e344c774 exists on VM [ 1383.653067] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892666, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.860450] env[68285]: DEBUG nova.scheduler.client.report [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1383.983219] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.983219] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1383.983506] env[68285]: DEBUG nova.network.neutron [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1384.156132] 
env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892666, 'name': MoveVirtualDisk_Task} progress is 26%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.365225] env[68285]: DEBUG oslo_concurrency.lockutils [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.659s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1384.397279] env[68285]: INFO nova.scheduler.client.report [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Deleted allocations for instance 852ab501-00a6-442b-804a-1bbf49a2be8c [ 1384.621385] env[68285]: DEBUG oslo_concurrency.lockutils [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1384.621385] env[68285]: DEBUG oslo_concurrency.lockutils [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1384.621385] env[68285]: DEBUG oslo_concurrency.lockutils [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1384.621385] env[68285]: DEBUG oslo_concurrency.lockutils [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1384.621528] env[68285]: DEBUG oslo_concurrency.lockutils [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1384.623989] env[68285]: INFO nova.compute.manager [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 
tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Terminating instance [ 1384.652825] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892666, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.909805] env[68285]: DEBUG oslo_concurrency.lockutils [None req-75fb3894-63d2-4ba1-aab0-017cd28d73df tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "852ab501-00a6-442b-804a-1bbf49a2be8c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.781s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1384.932028] env[68285]: INFO nova.network.neutron [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Port ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1384.932416] env[68285]: DEBUG nova.network.neutron [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updating instance_info_cache with network_info: [{"id": "efe1cc65-a9a1-4768-81db-53da716df13a", "address": "fa:16:3e:91:d9:2c", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefe1cc65-a9", "ovs_interfaceid": "efe1cc65-a9a1-4768-81db-53da716df13a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.035639] env[68285]: DEBUG nova.compute.manager [req-a2522f00-7691-4047-b7d5-72096b3d94e0 req-d2589033-a092-4fde-b10f-3add616a8397 service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Received event network-vif-deleted-ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1385.128164] env[68285]: DEBUG nova.compute.manager [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 
tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1385.128378] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1385.129308] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9caada4-d90c-451a-a717-8141cb3cefcf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.144789] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1385.152353] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ac279d7-7b62-4310-8dc9-6297ba9719c0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.160943] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892666, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.162473] env[68285]: DEBUG oslo_vmware.api [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1385.162473] env[68285]: value = "task-2892667" [ 1385.162473] env[68285]: _type = "Task" [ 1385.162473] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.174716] env[68285]: DEBUG oslo_vmware.api [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892667, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.343136] env[68285]: DEBUG nova.compute.manager [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Stashing vm_state: active {{(pid=68285) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1385.436620] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "refresh_cache-9ddeb48e-ef72-4e6e-9058-d45ebde7583e" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1385.656303] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892666, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.674266] env[68285]: DEBUG oslo_vmware.api [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892667, 'name': PowerOffVM_Task, 'duration_secs': 0.30154} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.674605] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1385.674827] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1385.675123] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa9eb5bd-9f3c-4926-80e2-24491ade19a2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.786099] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1385.786099] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1385.786099] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] 
Deleting the datastore file [datastore2] 9ddeb48e-ef72-4e6e-9058-d45ebde7583e {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1385.786304] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89b49af8-6f80-4b6a-a958-bf31550f9f5f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.792968] env[68285]: DEBUG oslo_vmware.api [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1385.792968] env[68285]: value = "task-2892670" [ 1385.792968] env[68285]: _type = "Task" [ 1385.792968] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.800945] env[68285]: DEBUG oslo_vmware.api [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892670, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.861428] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1385.861778] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1385.941859] env[68285]: DEBUG oslo_concurrency.lockutils [None req-2c986737-efd0-4f2d-88de-d32808713437 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-9ddeb48e-ef72-4e6e-9058-d45ebde7583e-b7625de4-8596-45da-a5c4-db92e344c774" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.815s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1386.155672] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892666, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.304892] env[68285]: DEBUG oslo_vmware.api [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892670, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174436} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.305075] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1386.305338] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1386.305458] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1386.305623] env[68285]: INFO nova.compute.manager [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1386.306294] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1386.306294] env[68285]: DEBUG nova.compute.manager [-] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1386.306294] env[68285]: DEBUG nova.network.neutron [-] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1386.367351] env[68285]: INFO nova.compute.claims [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1386.390183] env[68285]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9 could not be found.", "detail": ""}} {{(pid=68285) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1386.390478] env[68285]: DEBUG nova.network.neutron [-] Unable to show port ddaa0bce-7138-44cd-a7fc-fdcf92f64cc9 as it no longer exists. 
{{(pid=68285) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1386.392295] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1386.392525] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1386.658878] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892666, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.874535] env[68285]: INFO nova.compute.resource_tracker [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating resource usage from migration 9afec92a-7c47-455f-83f4-e7e9fb939454 [ 1386.899217] env[68285]: DEBUG nova.compute.manager [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1386.987445] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80069ad-3d80-4d75-85fa-3e36b5ff0f0f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.994860] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46572e5-f0d7-4fba-9145-2614c7776387 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.024738] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9978ef9-9f58-416e-8d59-df1510bd1e45 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.031576] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371674bb-fe43-4f75-9ed3-b0e9f4fe909d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.046727] env[68285]: DEBUG nova.compute.provider_tree [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1387.063377] env[68285]: DEBUG nova.compute.manager [req-9be867eb-acf6-44a6-8e49-6fe0082a2cb8 req-a9e104f2-67af-440a-abfb-b1f32058256e service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Received event network-vif-deleted-efe1cc65-a9a1-4768-81db-53da716df13a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1387.063377] env[68285]: INFO nova.compute.manager [req-9be867eb-acf6-44a6-8e49-6fe0082a2cb8 req-a9e104f2-67af-440a-abfb-b1f32058256e service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Neutron deleted interface efe1cc65-a9a1-4768-81db-53da716df13a; detaching it from the instance and deleting it from the info cache [ 1387.063377] env[68285]: DEBUG nova.network.neutron [req-9be867eb-acf6-44a6-8e49-6fe0082a2cb8 req-a9e104f2-67af-440a-abfb-b1f32058256e service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.157908] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892666, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.754729} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.159140] env[68285]: INFO nova.virt.vmwareapi.ds_util [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_698df1df-487a-4e8b-a883-fc8fb3fbf21b/OSTACK_IMG_698df1df-487a-4e8b-a883-fc8fb3fbf21b.vmdk to [datastore1] devstack-image-cache_base/771ad50d-8fe8-4388-9936-92056e5c4163/771ad50d-8fe8-4388-9936-92056e5c4163.vmdk. 
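The repeated "Task: {'id': task-2892666, 'name': MoveVirtualDisk_Task} progress is N%" entries above come from oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py): the driver submits a vCenter task and then keeps polling its state until it reports success or error. Below is a minimal sketch of that poll loop, assuming a `get_task_info` callable and a fixed sleep interval; both are illustrative stand-ins, not the actual oslo.vmware implementation (which drives the poll from a looping call).

```python
import time

POLL_INTERVAL = 0.5  # illustrative; the real poller uses a configurable looping call


class TaskFailed(Exception):
    """Raised when the vCenter task ends in an error state (illustrative)."""


def wait_for_task(get_task_info, task_id):
    """Poll a vCenter task until it completes.

    Mirrors the log pattern "Task: {...} progress is N%." followed by
    "... completed successfully."  `get_task_info` is an assumed callable
    returning e.g. {'state': 'running', 'progress': 43} for the task id.
    """
    while True:
        info = get_task_info(task_id)
        if info["state"] == "success":
            return info                                   # "completed successfully"
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        # corresponds to the periodic "progress is N%." DEBUG lines
        print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(POLL_INTERVAL)
```

The same loop shape is visible in this section for PowerOffVM_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task and ReconfigVM_Task; only the task name and reported duration differ.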
[ 1387.159140] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Cleaning up location [datastore1] OSTACK_IMG_698df1df-487a-4e8b-a883-fc8fb3fbf21b {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1387.159140] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_698df1df-487a-4e8b-a883-fc8fb3fbf21b {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1387.159140] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb63124a-e268-40ca-8521-10f582b8a73f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.164675] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1387.164675] env[68285]: value = "task-2892671" [ 1387.164675] env[68285]: _type = "Task" [ 1387.164675] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.172633] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892671, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.419485] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1387.432060] env[68285]: DEBUG nova.network.neutron [-] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.549857] env[68285]: DEBUG nova.scheduler.client.report [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1387.565106] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf60d12c-467d-472c-859b-40fbd191693c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.574968] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c61dc2-e0dd-461d-ac0f-3ddfa2ad753e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.601708] env[68285]: DEBUG nova.compute.manager [req-9be867eb-acf6-44a6-8e49-6fe0082a2cb8 req-a9e104f2-67af-440a-abfb-b1f32058256e service nova] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Detach interface failed, port_id=efe1cc65-a9a1-4768-81db-53da716df13a, reason: Instance 9ddeb48e-ef72-4e6e-9058-d45ebde7583e could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1387.675063] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892671, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033164} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.675567] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1387.675567] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Releasing lock "[datastore1] devstack-image-cache_base/771ad50d-8fe8-4388-9936-92056e5c4163/771ad50d-8fe8-4388-9936-92056e5c4163.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1387.675774] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/771ad50d-8fe8-4388-9936-92056e5c4163/771ad50d-8fe8-4388-9936-92056e5c4163.vmdk to [datastore1] e3117ede-5d88-4e47-a32f-ea91b1ba83ec/e3117ede-5d88-4e47-a32f-ea91b1ba83ec.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1387.675927] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7cbd1bbf-4cb7-4db1-a51d-a7a9de122499 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.683039] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1387.683039] env[68285]: value = "task-2892672" [ 1387.683039] env[68285]: _type = "Task" [ 1387.683039] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.690533] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.934729] env[68285]: INFO nova.compute.manager [-] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Took 1.63 seconds to deallocate network for instance. 
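The entries from 1384.6 through 1387.9 trace the full terminate path for instance 9ddeb48e-ef72-4e6e-9058-d45ebde7583e: power off the VM, unregister it, delete its datastore directory on datastore2, then deallocate its Neutron ports. The sketch below condenses that ordering under stated assumptions: the `hypervisor` and `neutron` objects and their method names are hypothetical stand-ins for the vCenter tasks and Neutron calls the log shows, and the real flow in nova.compute.manager and nova.virt.vmwareapi.vmops wraps each step in task polling and locking.

```python
class PortNotFound(Exception):
    """Stand-in for the neutronclient PortNotFound error seen in the log."""


def destroy_instance(hypervisor, neutron, vm, datastore_dir, ports):
    """Condensed teardown ordering matching the log above (illustrative only)."""
    hypervisor.power_off(vm)                # "Powering off the VM" -> PowerOffVM_Task
    hypervisor.unregister(vm)               # "Unregistering the VM" -> UnregisterVM
    hypervisor.delete_files(datastore_dir)  # "Deleting the datastore file [datastore2] <uuid>"
    for port in ports:                      # "Deallocating network for instance"
        try:
            neutron.delete_port(port)
        except PortNotFound:
            # The log tolerates this too: "Unable to show port ... as it no
            # longer exists." is logged and teardown simply continues.
            pass
```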
[ 1388.055564] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.193s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1388.055857] env[68285]: INFO nova.compute.manager [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Migrating [ 1388.063247] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.643s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1388.064891] env[68285]: INFO nova.compute.claims [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1388.194326] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892672, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.441200] env[68285]: DEBUG oslo_concurrency.lockutils [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1388.577727] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.577918] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1388.578042] env[68285]: DEBUG nova.network.neutron [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1388.695469] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: 
{'id': task-2892672, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.195232] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892672, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.200025] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe7a712-da3a-4f2a-a4f1-738d01dec2c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.208052] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10cc3c15-bfef-42fd-b197-9bba6485aba1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.243259] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf896339-4242-411e-99ef-219235c7132a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.251971] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b7e425-2719-41c4-8cd3-81313fa5dfee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.268499] env[68285]: DEBUG nova.compute.provider_tree [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1389.344240] env[68285]: DEBUG nova.network.neutron [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance_info_cache with network_info: [{"id": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "address": "fa:16:3e:71:fc:5c", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbe7fd71-a3", "ovs_interfaceid": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1389.693773] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892672, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.772192] env[68285]: DEBUG nova.scheduler.client.report [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1389.848235] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1389.899413] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "feda1a98-3086-43a6-a887-f4d1602ca8ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1389.899683] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "feda1a98-3086-43a6-a887-f4d1602ca8ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1389.899895] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "feda1a98-3086-43a6-a887-f4d1602ca8ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1389.900144] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "feda1a98-3086-43a6-a887-f4d1602ca8ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1389.900359] env[68285]: DEBUG 
oslo_concurrency.lockutils [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "feda1a98-3086-43a6-a887-f4d1602ca8ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1389.902790] env[68285]: INFO nova.compute.manager [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Terminating instance [ 1390.196522] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892672, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.277729] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.215s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1390.278288] env[68285]: DEBUG nova.compute.manager [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1390.281029] env[68285]: DEBUG oslo_concurrency.lockutils [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.840s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1390.281258] env[68285]: DEBUG nova.objects.instance [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'resources' on Instance uuid 9ddeb48e-ef72-4e6e-9058-d45ebde7583e {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1390.406472] env[68285]: DEBUG nova.compute.manager [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1390.406698] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1390.407601] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73dbea1b-55fc-478b-930c-38bc6bac5e8f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.415603] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1390.415829] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c2ad4a2-169c-4efd-84a0-b0c8964015a1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.421353] env[68285]: DEBUG oslo_vmware.api [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1390.421353] env[68285]: value = "task-2892673" [ 1390.421353] env[68285]: _type = "Task" [ 1390.421353] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.429231] env[68285]: DEBUG oslo_vmware.api [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892673, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.695291] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892672, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.566006} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.695591] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/771ad50d-8fe8-4388-9936-92056e5c4163/771ad50d-8fe8-4388-9936-92056e5c4163.vmdk to [datastore1] e3117ede-5d88-4e47-a32f-ea91b1ba83ec/e3117ede-5d88-4e47-a32f-ea91b1ba83ec.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1390.696457] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38eb5d2d-4794-4cca-8937-e881ffdefd45 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.717768] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] e3117ede-5d88-4e47-a32f-ea91b1ba83ec/e3117ede-5d88-4e47-a32f-ea91b1ba83ec.vmdk or device None with type streamOptimized {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1390.718025] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-626d8f65-036d-4fc7-a55b-5e7275510ac3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.736337] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1390.736337] env[68285]: value = "task-2892674" [ 1390.736337] env[68285]: _type = "Task" [ 1390.736337] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.743396] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892674, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.784026] env[68285]: DEBUG nova.compute.utils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1390.788207] env[68285]: DEBUG nova.compute.manager [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1390.788379] env[68285]: DEBUG nova.network.neutron [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1390.831443] env[68285]: DEBUG nova.policy [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '692b7f9e18974b8c83f30adb9dbfe8a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c272180eed81480fabd7e6d4dacc2613', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1390.887504] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a116f8bb-a609-4ce1-ac53-b8689ff16525 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.895674] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37841192-2727-40c4-a2fd-51559d422833 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.929625] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53bbdb1d-863c-4f93-84f1-25c14a6a8855 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.937019] env[68285]: DEBUG oslo_vmware.api [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892673, 'name': PowerOffVM_Task, 'duration_secs': 0.271314} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.939093] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1390.939300] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1390.939586] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7399b4ed-8fe8-4d16-8f69-9c2dd9c311fd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.941980] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83660c5-5083-4dc6-8a17-4abb4fa2173c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.955618] env[68285]: DEBUG nova.compute.provider_tree [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1391.033531] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1391.033781] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1391.034009] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Deleting the datastore file [datastore2] feda1a98-3086-43a6-a887-f4d1602ca8ee {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1391.034350] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e29cdd98-e2a8-497f-abeb-9556bbbbaff4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.046209] env[68285]: DEBUG oslo_vmware.api [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1391.046209] env[68285]: value = "task-2892676" [ 1391.046209] env[68285]: _type = "Task" [ 1391.046209] env[68285]: } to 
complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.054852] env[68285]: DEBUG oslo_vmware.api [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892676, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.222102] env[68285]: DEBUG nova.network.neutron [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Successfully created port: 4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1391.246935] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892674, 'name': ReconfigVM_Task, 'duration_secs': 0.356316} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.247271] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Reconfigured VM instance instance-00000072 to attach disk [datastore1] e3117ede-5d88-4e47-a32f-ea91b1ba83ec/e3117ede-5d88-4e47-a32f-ea91b1ba83ec.vmdk or device None with type streamOptimized {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1391.247943] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-13549ea7-64a0-4c59-8c3a-250e7aeb9627 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.255716] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1391.255716] env[68285]: value = "task-2892677" [ 1391.255716] env[68285]: _type = "Task" [ 1391.255716] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.264469] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892677, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.288998] env[68285]: DEBUG nova.compute.manager [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1391.365954] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45a4df7-d787-47cc-ab77-0e2236175867 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.388768] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance 'd1446290-95ce-4e87-85df-7cc69bb57ce7' progress to 0 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1391.458581] env[68285]: DEBUG nova.scheduler.client.report [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1391.556211] env[68285]: DEBUG oslo_vmware.api [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892676, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.765280] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892677, 'name': Rename_Task, 'duration_secs': 0.200872} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.765606] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1391.765857] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ddef5f4c-f6b9-4f02-94ad-d40f38bb27d7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.771557] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1391.771557] env[68285]: value = "task-2892678" [ 1391.771557] env[68285]: _type = "Task" [ 1391.771557] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.778832] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892678, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.896218] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1391.896567] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7934576-3d07-4e64-bcc2-d827c417eb89 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.903433] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1391.903433] env[68285]: value = "task-2892679" [ 1391.903433] env[68285]: _type = "Task" [ 1391.903433] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.912591] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892679, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.965039] env[68285]: DEBUG oslo_concurrency.lockutils [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.683s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1391.990318] env[68285]: INFO nova.scheduler.client.report [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Deleted allocations for instance 9ddeb48e-ef72-4e6e-9058-d45ebde7583e [ 1392.059170] env[68285]: DEBUG oslo_vmware.api [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892676, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.703807} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.059170] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1392.059170] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1392.059170] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1392.059421] env[68285]: INFO nova.compute.manager [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1392.059603] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1392.059767] env[68285]: DEBUG nova.compute.manager [-] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1392.059865] env[68285]: DEBUG nova.network.neutron [-] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1392.282161] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892678, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.300299] env[68285]: DEBUG nova.compute.manager [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Start spawning the instance on the hypervisor.
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1392.329173] env[68285]: DEBUG nova.virt.hardware [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1392.329453] env[68285]: DEBUG nova.virt.hardware [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1392.329624] env[68285]: DEBUG nova.virt.hardware [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1392.329831] env[68285]: DEBUG nova.virt.hardware [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1392.329987] env[68285]: DEBUG nova.virt.hardware [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1392.330192] env[68285]: DEBUG nova.virt.hardware [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1392.330465] env[68285]: DEBUG nova.virt.hardware [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1392.330634] env[68285]: DEBUG nova.virt.hardware [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1392.330804] env[68285]: DEBUG nova.virt.hardware [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1392.330972] env[68285]: DEBUG nova.virt.hardware [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1392.331167] env[68285]: DEBUG nova.virt.hardware [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1392.332179] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40cb6f8-7f79-4389-8262-5478b0d7aeb6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.340630] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0fc772-a4cd-4487-9f54-24793c2b2404 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.347343] env[68285]: DEBUG nova.compute.manager [req-769f6c89-663f-49a3-990f-4ac99e562664 req-a357ca3a-2754-4d2b-8ad9-36640e21c7b6 service nova] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Received event network-vif-deleted-4ee7857e-7e56-4be9-bc5b-a3963713b734 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1392.347578] env[68285]: INFO nova.compute.manager [req-769f6c89-663f-49a3-990f-4ac99e562664 req-a357ca3a-2754-4d2b-8ad9-36640e21c7b6 service nova] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Neutron deleted interface 4ee7857e-7e56-4be9-bc5b-a3963713b734; detaching it from the instance and deleting it from the info cache [ 1392.347694] env[68285]: DEBUG nova.network.neutron [req-769f6c89-663f-49a3-990f-4ac99e562664 req-a357ca3a-2754-4d2b-8ad9-36640e21c7b6 service nova] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1392.412675] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892679, 'name': PowerOffVM_Task, 'duration_secs': 0.198464} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.413228] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1392.413362] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance 'd1446290-95ce-4e87-85df-7cc69bb57ce7' progress to 17 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1392.497445] env[68285]: DEBUG oslo_concurrency.lockutils [None req-edcbbb77-53be-4b13-b6bd-df98f1e06633 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "9ddeb48e-ef72-4e6e-9058-d45ebde7583e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.876s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1392.649602] env[68285]: DEBUG nova.compute.manager [req-801f945e-8fd6-415d-8c4a-82bba66ee1fc req-5b3c9a3f-e451-4e9d-886a-ce52faa8e920 service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Received event network-vif-plugged-4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1392.649788] env[68285]: DEBUG oslo_concurrency.lockutils [req-801f945e-8fd6-415d-8c4a-82bba66ee1fc req-5b3c9a3f-e451-4e9d-886a-ce52faa8e920 service nova] Acquiring lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1392.650019] env[68285]: DEBUG oslo_concurrency.lockutils [req-801f945e-8fd6-415d-8c4a-82bba66ee1fc req-5b3c9a3f-e451-4e9d-886a-ce52faa8e920 service nova] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1392.650172] env[68285]: DEBUG oslo_concurrency.lockutils [req-801f945e-8fd6-415d-8c4a-82bba66ee1fc req-5b3c9a3f-e451-4e9d-886a-ce52faa8e920 service nova] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1392.650342] env[68285]: DEBUG nova.compute.manager [req-801f945e-8fd6-415d-8c4a-82bba66ee1fc req-5b3c9a3f-e451-4e9d-886a-ce52faa8e920 service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] No waiting events found dispatching network-vif-plugged-4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1392.650506] env[68285]: WARNING nova.compute.manager [req-801f945e-8fd6-415d-8c4a-82bba66ee1fc req-5b3c9a3f-e451-4e9d-886a-ce52faa8e920 service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Received unexpected event
network-vif-plugged-4cd7aff5-25ff-4491-b7b0-a079248d54f4 for instance with vm_state building and task_state spawning. [ 1392.734761] env[68285]: DEBUG nova.network.neutron [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Successfully updated port: 4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1392.782909] env[68285]: DEBUG oslo_vmware.api [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892678, 'name': PowerOnVM_Task, 'duration_secs': 0.559488} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.783594] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1392.797864] env[68285]: DEBUG nova.network.neutron [-] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1392.850180] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f0dcbec-7736-4f71-a687-3f3b94ddbfe3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.860496] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561bd41d-6347-4d5a-958d-7c985241f7cb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.886936] env[68285]: DEBUG nova.compute.manager [req-769f6c89-663f-49a3-990f-4ac99e562664 req-a357ca3a-2754-4d2b-8ad9-36640e21c7b6 service nova] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Detach interface failed, port_id=4ee7857e-7e56-4be9-bc5b-a3963713b734, reason: Instance feda1a98-3086-43a6-a887-f4d1602ca8ee could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1392.888103] env[68285]: DEBUG nova.compute.manager [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1392.888913] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89c0960-6f07-47c8-862b-c2fb074a6003 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.919732] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1392.920153] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1392.920153] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1392.920304] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1392.920446] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1392.920597] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1392.920798] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1392.920955] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1392.921148] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1392.921341] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1392.921511] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1392.926573] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-215178b8-baff-444d-b027-4a45f4a09346 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.943124] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1392.943124] env[68285]: value = "task-2892680" [ 1392.943124] env[68285]: _type = "Task" [ 1392.943124] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.952676] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892680, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.237881] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1393.237943] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1393.238063] env[68285]: DEBUG nova.network.neutron [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1393.300662] env[68285]: INFO nova.compute.manager [-] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Took 1.24 seconds to deallocate network for instance. [ 1393.406882] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7684bf0d-c201-408a-9701-1f53d1a32b4b tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" "released" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: held 24.065s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1393.453313] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892680, 'name': ReconfigVM_Task, 'duration_secs': 0.415584} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.453745] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance 'd1446290-95ce-4e87-85df-7cc69bb57ce7' progress to 33 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1393.777178] env[68285]: DEBUG nova.network.neutron [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Instance cache missing network info.
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1393.807197] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1393.808359] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1393.808359] env[68285]: DEBUG nova.objects.instance [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lazy-loading 'resources' on Instance uuid feda1a98-3086-43a6-a887-f4d1602ca8ee {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1393.921713] env[68285]: DEBUG nova.network.neutron [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updating instance_info_cache with network_info: [{"id": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "address": "fa:16:3e:6b:0f:c0", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cd7aff5-25", "ovs_interfaceid": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.960404] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1393.961117] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1393.961117] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1393.961117] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1393.961288] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1393.961324] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1393.961526] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1393.961688] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1393.961854] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1393.962028] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1393.962208] env[68285]: DEBUG nova.virt.hardware [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Sorted desired topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1393.967811] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Reconfiguring VM instance instance-00000056 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1393.968285] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88ff8736-f029-4202-a923-e5c20ff01ce3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.987815] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1393.987815] env[68285]: value = "task-2892681" [ 1393.987815] env[68285]: _type = "Task" [ 1393.987815] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.996247] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892681, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.407534] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221dac0e-5ef6-4621-b821-4a13e5a8ece4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.415735] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4acd8c27-8ca6-483b-8684-512e9dea069b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.448335] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1394.448647] env[68285]: DEBUG nova.compute.manager [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Instance network_info: |[{"id": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "address": "fa:16:3e:6b:0f:c0", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cd7aff5-25", "ovs_interfaceid": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1394.449870] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:0f:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '82ca17df-257e-40e6-9ec9-310ed6f05ccb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4cd7aff5-25ff-4491-b7b0-a079248d54f4', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1394.457364] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Creating folder: Project (c272180eed81480fabd7e6d4dacc2613). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1394.458143] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03efe9e-3bc9-4d24-b9f0-f65baeccecd8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.460862] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1cabf8f-6147-426d-bbbb-c6fcc938b8b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.467685] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2aa3437-3645-42cc-b3ba-065ca68cf0de {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.472598] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Created folder: Project (c272180eed81480fabd7e6d4dacc2613) in parent group-v580775. [ 1394.472790] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Creating folder: Instances. Parent ref: group-v581101. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1394.473448] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5133d021-13cc-4a3d-a28b-69402053fcb2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.483198] env[68285]: DEBUG nova.compute.provider_tree [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1394.494642] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Created folder: Instances in parent group-v581101. [ 1394.494914] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1394.497921] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1394.498441] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892681, 'name': ReconfigVM_Task, 'duration_secs': 0.164004} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.498639] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e55d6335-871f-47c8-a61c-2c1254a9f74e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.512725] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Reconfigured VM instance instance-00000056 to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1394.513746] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6eccd8-fc72-4efe-8da9-15ec345670ee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.535717] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] d1446290-95ce-4e87-85df-7cc69bb57ce7/d1446290-95ce-4e87-85df-7cc69bb57ce7.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1394.536838] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc7d1670-b89f-416a-8c2f-2af87aeaed4f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.549742] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1394.549742] env[68285]: value = "task-2892684" [ 1394.549742] env[68285]: _type = "Task" [ 1394.549742] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.554901] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1394.554901] env[68285]: value = "task-2892685" [ 1394.554901] env[68285]: _type = "Task" [ 1394.554901] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.560832] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892684, 'name': CreateVM_Task} progress is 15%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.565285] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892685, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.675399] env[68285]: DEBUG nova.compute.manager [req-b62ca2a2-84d7-4e2f-b24b-2847dd181fd3 req-8ae6806a-14ab-4968-9b82-65520f99821a service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Received event network-changed-4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1394.675475] env[68285]: DEBUG nova.compute.manager [req-b62ca2a2-84d7-4e2f-b24b-2847dd181fd3 req-8ae6806a-14ab-4968-9b82-65520f99821a service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Refreshing instance network info cache due to event network-changed-4cd7aff5-25ff-4491-b7b0-a079248d54f4. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1394.675705] env[68285]: DEBUG oslo_concurrency.lockutils [req-b62ca2a2-84d7-4e2f-b24b-2847dd181fd3 req-8ae6806a-14ab-4968-9b82-65520f99821a service nova] Acquiring lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.675881] env[68285]: DEBUG oslo_concurrency.lockutils [req-b62ca2a2-84d7-4e2f-b24b-2847dd181fd3 req-8ae6806a-14ab-4968-9b82-65520f99821a service nova] Acquired lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1394.676075] env[68285]: DEBUG nova.network.neutron [req-b62ca2a2-84d7-4e2f-b24b-2847dd181fd3 req-8ae6806a-14ab-4968-9b82-65520f99821a service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Refreshing network info cache for port 4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1394.677559] env[68285]: DEBUG oslo_concurrency.lockutils [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1394.677775] env[68285]: DEBUG oslo_concurrency.lockutils [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1394.678018] env[68285]: DEBUG oslo_concurrency.lockutils [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1394.678230] env[68285]: DEBUG oslo_concurrency.lockutils [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1394.678408] env[68285]: DEBUG oslo_concurrency.lockutils [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1394.681262] env[68285]: INFO nova.compute.manager [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Terminating instance [ 1394.988511] env[68285]: DEBUG nova.scheduler.client.report [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1395.012119] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1395.012340] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1395.062327] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892684, 'name': CreateVM_Task, 'duration_secs': 0.348559} completed successfully.
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.062780] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1395.063524] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.063683] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1395.064029] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1395.066966] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c72aa473-de95-4b37-b454-4de92e8f8a0a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.068464] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892685, 'name': ReconfigVM_Task, 'duration_secs': 0.283856} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.068708] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Reconfigured VM instance instance-00000056 to attach disk [datastore1] d1446290-95ce-4e87-85df-7cc69bb57ce7/d1446290-95ce-4e87-85df-7cc69bb57ce7.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1395.068947] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance 'd1446290-95ce-4e87-85df-7cc69bb57ce7' progress to 50 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1395.075199] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1395.075199] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5238c626-10e3-7b19-ddd1-c0857be9c8ba" [ 1395.075199] env[68285]: _type = "Task" [ 1395.075199] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.082818] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5238c626-10e3-7b19-ddd1-c0857be9c8ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.185481] env[68285]: DEBUG nova.compute.manager [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1395.185651] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1395.186553] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bf9085-af1b-4acc-9b98-9b4076984539 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.193743] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1395.193971] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4afec53c-2f03-494d-a940-0c4ea338d2bb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.199512] env[68285]: DEBUG oslo_vmware.api [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1395.199512] env[68285]: value = "task-2892686" [ 1395.199512] env[68285]: _type = "Task" [ 1395.199512] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.207036] env[68285]: DEBUG oslo_vmware.api [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892686, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.494293] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.687s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1395.515397] env[68285]: DEBUG nova.compute.manager [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1395.518914] env[68285]: INFO nova.scheduler.client.report [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Deleted allocations for instance feda1a98-3086-43a6-a887-f4d1602ca8ee [ 1395.577500] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7963b0dd-f8c8-47f9-b911-05cca6131785 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.603871] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac482566-dc7a-480f-a41c-11f2322eb7e8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.606295] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5238c626-10e3-7b19-ddd1-c0857be9c8ba, 'name': SearchDatastore_Task, 'duration_secs': 0.01212} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.606574] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1395.606801] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1395.607042] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.607196] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1395.607369] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1395.607943] env[68285]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-bbbf79c9-c43c-467c-b44c-1d5c4aa46212 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.624724] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance 'd1446290-95ce-4e87-85df-7cc69bb57ce7' progress to 67 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1395.628789] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1395.628969] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1395.629856] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf6fc414-407d-4dde-b745-d9f4e9a59ee5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.635474] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1395.635474] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]526168a8-f84b-4629-24d5-3fdf35c63a29" [ 1395.635474] env[68285]: _type = "Task" [ 1395.635474] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.645952] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]526168a8-f84b-4629-24d5-3fdf35c63a29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.709406] env[68285]: DEBUG oslo_vmware.api [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892686, 'name': PowerOffVM_Task, 'duration_secs': 0.199351} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.711536] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1395.711536] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1395.711536] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-460f7932-b3b4-4cdc-bb81-088a561c36fa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.768040] env[68285]: DEBUG nova.network.neutron [req-b62ca2a2-84d7-4e2f-b24b-2847dd181fd3 req-8ae6806a-14ab-4968-9b82-65520f99821a service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updated VIF entry in instance network info cache for port 4cd7aff5-25ff-4491-b7b0-a079248d54f4. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1395.768040] env[68285]: DEBUG nova.network.neutron [req-b62ca2a2-84d7-4e2f-b24b-2847dd181fd3 req-8ae6806a-14ab-4968-9b82-65520f99821a service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updating instance_info_cache with network_info: [{"id": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "address": "fa:16:3e:6b:0f:c0", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cd7aff5-25", "ovs_interfaceid": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.776724] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1395.776939] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 
tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1395.777131] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleting the datastore file [datastore1] e3117ede-5d88-4e47-a32f-ea91b1ba83ec {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1395.777705] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88b095b8-98b2-40d2-9f84-d6d4e5c92a9d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.783935] env[68285]: DEBUG oslo_vmware.api [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for the task: (returnval){ [ 1395.783935] env[68285]: value = "task-2892688" [ 1395.783935] env[68285]: _type = "Task" [ 1395.783935] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.791957] env[68285]: DEBUG oslo_vmware.api [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892688, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.029138] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f6e747c2-2dfb-4ffb-b17b-00c3ca5f4e63 tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "feda1a98-3086-43a6-a887-f4d1602ca8ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.129s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1396.036973] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1396.036973] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1396.038702] env[68285]: INFO nova.compute.claims [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1396.145995] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 
tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]526168a8-f84b-4629-24d5-3fdf35c63a29, 'name': SearchDatastore_Task, 'duration_secs': 0.010933} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.146766] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6e93b03-c581-4b8a-87d5-c8e49838cbe1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.151854] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1396.151854] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5241fd52-99c6-6f94-273b-b07737e21b06" [ 1396.151854] env[68285]: _type = "Task" [ 1396.151854] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.159683] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5241fd52-99c6-6f94-273b-b07737e21b06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.163008] env[68285]: DEBUG nova.network.neutron [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Port dbe7fd71-a38e-450c-a4ef-497eaf455ff0 binding to destination host cpu-1 is already ACTIVE {{(pid=68285) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1396.268953] env[68285]: DEBUG oslo_concurrency.lockutils [req-b62ca2a2-84d7-4e2f-b24b-2847dd181fd3 req-8ae6806a-14ab-4968-9b82-65520f99821a service nova] Releasing lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1396.294156] env[68285]: DEBUG oslo_vmware.api [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Task: {'id': task-2892688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132058} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.294284] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1396.294466] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1396.294636] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1396.294803] env[68285]: INFO nova.compute.manager [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1396.295052] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1396.295242] env[68285]: DEBUG nova.compute.manager [-] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1396.295337] env[68285]: DEBUG nova.network.neutron [-] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1396.520678] env[68285]: DEBUG oslo_concurrency.lockutils [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "801f524e-28b5-4452-b880-0fc30d3c5eef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1396.520955] env[68285]: DEBUG oslo_concurrency.lockutils [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "801f524e-28b5-4452-b880-0fc30d3c5eef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1396.521240] env[68285]: DEBUG oslo_concurrency.lockutils [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "801f524e-28b5-4452-b880-0fc30d3c5eef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1396.521461] env[68285]: DEBUG oslo_concurrency.lockutils [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "801f524e-28b5-4452-b880-0fc30d3c5eef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1396.521672] env[68285]: DEBUG oslo_concurrency.lockutils [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "801f524e-28b5-4452-b880-0fc30d3c5eef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1396.524043] env[68285]: INFO nova.compute.manager [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Terminating instance [ 1396.662946] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5241fd52-99c6-6f94-273b-b07737e21b06, 'name': SearchDatastore_Task, 'duration_secs': 
0.010381} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.663349] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1396.663702] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] f9d35416-1f7f-4bf5-baba-1ce4e7436341/f9d35416-1f7f-4bf5-baba-1ce4e7436341.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1396.664079] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b348f8a-cd55-4465-ae74-6a52953624d9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.680162] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1396.680162] env[68285]: value = "task-2892689" [ 1396.680162] env[68285]: _type = "Task" [ 1396.680162] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.691505] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892689, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.709867] env[68285]: DEBUG nova.compute.manager [req-ea5f3be1-030f-4263-a0e8-9181639c6332 req-29087736-475f-4ec3-8bc7-24b173053985 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Received event network-vif-deleted-10199287-9009-48cc-b97a-e94229f7d640 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1396.710190] env[68285]: INFO nova.compute.manager [req-ea5f3be1-030f-4263-a0e8-9181639c6332 req-29087736-475f-4ec3-8bc7-24b173053985 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Neutron deleted interface 10199287-9009-48cc-b97a-e94229f7d640; detaching it from the instance and deleting it from the info cache [ 1396.710342] env[68285]: DEBUG nova.network.neutron [req-ea5f3be1-030f-4263-a0e8-9181639c6332 req-29087736-475f-4ec3-8bc7-24b173053985 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.028121] env[68285]: DEBUG nova.compute.manager [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1397.028121] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1397.029452] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4913528f-a3c9-4259-ab2c-879fac4bce75 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.037996] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1397.038451] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af2198da-5a71-484c-b763-d34874b7a7dc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.044752] env[68285]: DEBUG oslo_vmware.api [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1397.044752] env[68285]: value = "task-2892690" [ 1397.044752] env[68285]: _type = "Task" [ 1397.044752] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.058058] env[68285]: DEBUG oslo_vmware.api [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892690, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.151021] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ecf4643-29a0-4330-8052-3fba7649173e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.159355] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-909d42b2-5358-4166-9c1b-baf8821d3140 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.198968] env[68285]: DEBUG nova.network.neutron [-] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.207012] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7107e1e-f63f-4d8c-910f-1ac332236c78 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.213786] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "d1446290-95ce-4e87-85df-7cc69bb57ce7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1397.214043] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1397.214265] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1397.216937] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-debf8bc6-3fc5-4fa2-8700-af860ddec21e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.225460] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892689, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469606} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.225946] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] f9d35416-1f7f-4bf5-baba-1ce4e7436341/f9d35416-1f7f-4bf5-baba-1ce4e7436341.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1397.226232] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1397.227667] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb79f76-f336-431e-9001-124e2d23e09e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.236102] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a4471de-d9bc-4d7c-8641-04700ec60fc9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.241842] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06fb3609-bb46-4b68-a887-dc035360e047 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.266783] env[68285]: DEBUG nova.compute.provider_tree [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1397.270563] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1397.270563] env[68285]: value = "task-2892691" [ 1397.270563] env[68285]: _type = "Task" [ 1397.270563] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.279677] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892691, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.286613] env[68285]: DEBUG nova.compute.manager [req-ea5f3be1-030f-4263-a0e8-9181639c6332 req-29087736-475f-4ec3-8bc7-24b173053985 service nova] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Detach interface failed, port_id=10199287-9009-48cc-b97a-e94229f7d640, reason: Instance e3117ede-5d88-4e47-a32f-ea91b1ba83ec could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1397.555330] env[68285]: DEBUG oslo_vmware.api [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892690, 'name': PowerOffVM_Task, 'duration_secs': 0.259639} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.555610] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1397.555777] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1397.556032] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d47641b-f8d5-478f-ab2c-a374ce7436fe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.619513] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1397.619774] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1397.619994] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Deleting the datastore file [datastore2] 801f524e-28b5-4452-b880-0fc30d3c5eef {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1397.620307] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b50a9a0a-821b-421d-881f-6ce13c6f9788 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.627053] env[68285]: DEBUG oslo_vmware.api [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for the task: (returnval){ [ 1397.627053] env[68285]: value = "task-2892693" [ 1397.627053] env[68285]: _type = "Task" [ 1397.627053] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.635583] env[68285]: DEBUG oslo_vmware.api [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892693, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.704414] env[68285]: INFO nova.compute.manager [-] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Took 1.41 seconds to deallocate network for instance. [ 1397.771340] env[68285]: DEBUG nova.scheduler.client.report [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1397.784649] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892691, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074257} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.785132] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1397.785729] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8e23df-fbef-4d57-a4b0-18f218a88ae5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.810245] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] f9d35416-1f7f-4bf5-baba-1ce4e7436341/f9d35416-1f7f-4bf5-baba-1ce4e7436341.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1397.810996] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a1839bc-9a92-4e03-8210-528aafc156ec {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.829345] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1397.829345] env[68285]: value = 
"task-2892694" [ 1397.829345] env[68285]: _type = "Task" [ 1397.829345] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.838737] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892694, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.137169] env[68285]: DEBUG oslo_vmware.api [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Task: {'id': task-2892693, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181445} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.137540] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1398.137609] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1398.137816] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1398.138048] env[68285]: INFO nova.compute.manager [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1398.138301] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1398.138502] env[68285]: DEBUG nova.compute.manager [-] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1398.138597] env[68285]: DEBUG nova.network.neutron [-] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1398.211162] env[68285]: DEBUG oslo_concurrency.lockutils [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1398.246897] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.247099] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1398.247280] env[68285]: DEBUG nova.network.neutron [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1398.276715] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.240s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1398.277346] env[68285]: DEBUG nova.compute.manager [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1398.280246] env[68285]: DEBUG oslo_concurrency.lockutils [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.069s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1398.280461] env[68285]: DEBUG nova.objects.instance [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lazy-loading 'resources' on Instance uuid e3117ede-5d88-4e47-a32f-ea91b1ba83ec {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1398.339520] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892694, 'name': ReconfigVM_Task, 'duration_secs': 0.318769} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.339815] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Reconfigured VM instance instance-00000079 to attach disk [datastore2] f9d35416-1f7f-4bf5-baba-1ce4e7436341/f9d35416-1f7f-4bf5-baba-1ce4e7436341.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1398.340529] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c594bfc9-0ccf-483d-9cf8-22be6288619c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.347015] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1398.347015] env[68285]: value = "task-2892695" [ 1398.347015] env[68285]: _type = "Task" [ 1398.347015] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.354823] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892695, 'name': Rename_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.744345] env[68285]: DEBUG nova.compute.manager [req-1f3a47c9-9c27-4fe9-988e-f90413da1dae req-fd26f845-65a2-4db5-a6b0-a5187462c9fe service nova] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Received event network-vif-deleted-1d10105d-1754-49c2-9593-7de22107732e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1398.744553] env[68285]: INFO nova.compute.manager [req-1f3a47c9-9c27-4fe9-988e-f90413da1dae req-fd26f845-65a2-4db5-a6b0-a5187462c9fe service nova] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Neutron deleted interface 1d10105d-1754-49c2-9593-7de22107732e; detaching it from the instance and deleting it from the info cache [ 1398.744728] env[68285]: DEBUG nova.network.neutron [req-1f3a47c9-9c27-4fe9-988e-f90413da1dae req-fd26f845-65a2-4db5-a6b0-a5187462c9fe service nova] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.783423] env[68285]: DEBUG nova.compute.utils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1398.790296] env[68285]: DEBUG nova.compute.manager [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1398.790479] env[68285]: DEBUG nova.network.neutron [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1398.835297] env[68285]: DEBUG nova.policy [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '494447cb560a41dd9a3118745ac60554', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75a6837bced940cdaf5743b8e94cce29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1398.856912] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892695, 'name': Rename_Task, 'duration_secs': 0.150293} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.859383] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1398.859820] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ed74640-7b9f-429c-beb0-f75314bc19a8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.866845] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1398.866845] env[68285]: value = "task-2892696" [ 1398.866845] env[68285]: _type = "Task" [ 1398.866845] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.878620] env[68285]: DEBUG nova.network.neutron [-] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.880059] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892696, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.886342] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747547af-abfa-465d-b2a5-80de340add5c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.897872] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c741932d-fd98-44f7-bd99-8945613664d4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.930718] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24daff7b-0314-4a2b-af5f-5f029d88e76d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.941350] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9bd516b-b944-41dc-a163-3377a780eebd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.955113] env[68285]: DEBUG nova.compute.provider_tree [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1399.053358] env[68285]: DEBUG nova.network.neutron [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] 
Updating instance_info_cache with network_info: [{"id": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "address": "fa:16:3e:71:fc:5c", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbe7fd71-a3", "ovs_interfaceid": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.110496] env[68285]: DEBUG nova.network.neutron [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Successfully created port: 6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1399.247397] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b184332d-53a5-4f86-a05f-c69f094307f7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.257032] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08a3499-084a-4931-8007-e99ecf23acd1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.285443] env[68285]: DEBUG nova.compute.manager [req-1f3a47c9-9c27-4fe9-988e-f90413da1dae req-fd26f845-65a2-4db5-a6b0-a5187462c9fe service nova] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Detach interface failed, port_id=1d10105d-1754-49c2-9593-7de22107732e, reason: Instance 801f524e-28b5-4452-b880-0fc30d3c5eef could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1399.293261] env[68285]: DEBUG nova.compute.manager [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1399.376800] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892696, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.381700] env[68285]: INFO nova.compute.manager [-] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Took 1.24 seconds to deallocate network for instance. [ 1399.458030] env[68285]: DEBUG nova.scheduler.client.report [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1399.555976] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1399.878242] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892696, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.887268] env[68285]: DEBUG oslo_concurrency.lockutils [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1399.963821] env[68285]: DEBUG oslo_concurrency.lockutils [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.683s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1399.966033] env[68285]: DEBUG oslo_concurrency.lockutils [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.079s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1399.966033] env[68285]: DEBUG nova.objects.instance [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lazy-loading 'resources' on Instance uuid 801f524e-28b5-4452-b880-0fc30d3c5eef {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1399.987667] env[68285]: INFO nova.scheduler.client.report [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 
tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Deleted allocations for instance e3117ede-5d88-4e47-a32f-ea91b1ba83ec [ 1400.076898] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b80a353-2553-449f-a4bc-c93972962b98 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.095643] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d87a8d9c-3c56-4736-9d87-4c7a365438e9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.102475] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance 'd1446290-95ce-4e87-85df-7cc69bb57ce7' progress to 83 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1400.303433] env[68285]: DEBUG nova.compute.manager [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1400.329497] env[68285]: DEBUG nova.virt.hardware [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1400.329746] env[68285]: DEBUG nova.virt.hardware [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1400.329904] env[68285]: DEBUG nova.virt.hardware [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1400.330096] env[68285]: DEBUG nova.virt.hardware [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1400.330245] env[68285]: 
DEBUG nova.virt.hardware [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1400.330395] env[68285]: DEBUG nova.virt.hardware [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1400.330598] env[68285]: DEBUG nova.virt.hardware [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1400.330760] env[68285]: DEBUG nova.virt.hardware [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1400.330925] env[68285]: DEBUG nova.virt.hardware [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1400.331118] env[68285]: DEBUG nova.virt.hardware [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1400.331299] env[68285]: DEBUG nova.virt.hardware [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1400.332168] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7c1fd1-0513-43aa-9c4d-408f4b8b72cc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.340341] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0027f826-cc8c-4826-8a6b-9d78c5a3d975 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.375664] env[68285]: DEBUG oslo_vmware.api [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892696, 'name': PowerOnVM_Task, 'duration_secs': 1.029613} completed successfully. 
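Annotation: the PowerOnVM_Task entries above (Invoking VirtualMachine.PowerOnVM_Task, then the "progress is 0%/66%/100%" polls from oslo_vmware/api.py) follow oslo.vmware's submit-then-poll pattern. A minimal sketch of that pattern is below; the function name is made up, and the vm_ref lookup and session construction are omitted (the session would be an oslo_vmware.api.VMwareAPISession like the one created at the start of this log).

def power_on(session, vm_ref):
    # `session` is an oslo_vmware.api.VMwareAPISession; `vm_ref` is the
    # VirtualMachine managed object reference (lookup omitted here).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task polls the task state, emitting the "progress is N%"
    # lines seen above, and returns the task info once it succeeds.
    return session.wait_for_task(task)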
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.375926] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1400.376218] env[68285]: INFO nova.compute.manager [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Took 8.08 seconds to spawn the instance on the hypervisor. [ 1400.376403] env[68285]: DEBUG nova.compute.manager [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1400.377136] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e18f17c-1de0-4bd7-927d-8a84f2ec7680 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.473112] env[68285]: DEBUG nova.compute.manager [req-6ccb7d2f-b338-4fc7-9357-f72156cbf848 req-6f3857d8-980a-48ce-9ee7-075e4441df7b service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Received event network-vif-plugged-6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1400.473380] env[68285]: DEBUG oslo_concurrency.lockutils [req-6ccb7d2f-b338-4fc7-9357-f72156cbf848 req-6f3857d8-980a-48ce-9ee7-075e4441df7b service nova] Acquiring lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1400.473583] env[68285]: DEBUG oslo_concurrency.lockutils [req-6ccb7d2f-b338-4fc7-9357-f72156cbf848 req-6f3857d8-980a-48ce-9ee7-075e4441df7b service nova] Lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1400.473747] env[68285]: DEBUG oslo_concurrency.lockutils [req-6ccb7d2f-b338-4fc7-9357-f72156cbf848 req-6f3857d8-980a-48ce-9ee7-075e4441df7b service nova] Lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1400.473914] env[68285]: DEBUG nova.compute.manager [req-6ccb7d2f-b338-4fc7-9357-f72156cbf848 req-6f3857d8-980a-48ce-9ee7-075e4441df7b service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] No waiting events found dispatching network-vif-plugged-6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1400.474094] env[68285]: WARNING nova.compute.manager [req-6ccb7d2f-b338-4fc7-9357-f72156cbf848 req-6f3857d8-980a-48ce-9ee7-075e4441df7b service nova] [instance: 
f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Received unexpected event network-vif-plugged-6228c66e-e1b1-4b17-bdb2-ae945380a77a for instance with vm_state building and task_state spawning. [ 1400.494435] env[68285]: DEBUG oslo_concurrency.lockutils [None req-993862ff-c14c-4667-87e7-4f5e581a32e3 tempest-ServerActionsTestOtherB-158934431 tempest-ServerActionsTestOtherB-158934431-project-member] Lock "e3117ede-5d88-4e47-a32f-ea91b1ba83ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.817s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1400.547211] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699e9949-da98-4f38-8e8d-4778d88fb986 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.555116] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290d09c7-087d-4b5a-b509-c72cc24feb82 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.561519] env[68285]: DEBUG nova.network.neutron [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Successfully updated port: 6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1400.588013] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.588165] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1400.588315] env[68285]: DEBUG nova.network.neutron [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1400.589808] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6531aaf-095e-46d4-b854-d03cd2ac14b7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.598803] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82fa18ba-6076-41fc-aea8-38879085155f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.614139] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Powering on the VM {{(pid=68285) 
power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1400.614826] env[68285]: DEBUG nova.compute.provider_tree [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1400.615997] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b29a294-24c2-4d5d-b09b-eb894e6f2dcf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.621975] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1400.621975] env[68285]: value = "task-2892697" [ 1400.621975] env[68285]: _type = "Task" [ 1400.621975] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.630475] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892697, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.892761] env[68285]: INFO nova.compute.manager [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Took 13.49 seconds to build instance. [ 1401.119048] env[68285]: DEBUG nova.scheduler.client.report [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1401.123636] env[68285]: DEBUG nova.network.neutron [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1401.135839] env[68285]: DEBUG oslo_vmware.api [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892697, 'name': PowerOnVM_Task, 'duration_secs': 0.374521} completed successfully. 
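Annotation: the "Inventory has not changed for provider ..." entries report, per resource class, a total, a reserved amount and an allocation ratio; the capacity the scheduler can place against is (total - reserved) * allocation_ratio. Worked with the figures reported in this log:

# Effective capacity implied by the inventory dict logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0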
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.136939] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1401.137140] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb151c2-2a65-4328-836d-12b57560a174 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance 'd1446290-95ce-4e87-85df-7cc69bb57ce7' progress to 100 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1401.256348] env[68285]: DEBUG nova.network.neutron [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updating instance_info_cache with network_info: [{"id": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "address": "fa:16:3e:6d:6f:ad", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6228c66e-e1", "ovs_interfaceid": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.395347] env[68285]: DEBUG oslo_concurrency.lockutils [None req-fe86bd9a-f8ea-4c75-9053-53a40710173d tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.003s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1401.627270] env[68285]: DEBUG oslo_concurrency.lockutils [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.661s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1401.651742] env[68285]: INFO nova.scheduler.client.report [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 
tempest-ServerRescueNegativeTestJSON-696928139-project-member] Deleted allocations for instance 801f524e-28b5-4452-b880-0fc30d3c5eef [ 1401.759545] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1401.759870] env[68285]: DEBUG nova.compute.manager [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Instance network_info: |[{"id": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "address": "fa:16:3e:6d:6f:ad", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6228c66e-e1", "ovs_interfaceid": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1401.760303] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:6f:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6228c66e-e1b1-4b17-bdb2-ae945380a77a', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1401.767784] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1401.768240] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1401.768470] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-413b83a3-37fc-4e2f-88bf-3239e9f52193 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.788364] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1401.788364] env[68285]: value = "task-2892699" [ 1401.788364] env[68285]: _type = "Task" [ 1401.788364] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.795809] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892699, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.854521] env[68285]: DEBUG nova.compute.manager [req-662dea87-6dd0-4d63-a937-902a1b12e711 req-5aaeb310-88ea-4530-9246-19c63e06bf2e service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Received event network-changed-4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1401.854764] env[68285]: DEBUG nova.compute.manager [req-662dea87-6dd0-4d63-a937-902a1b12e711 req-5aaeb310-88ea-4530-9246-19c63e06bf2e service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Refreshing instance network info cache due to event network-changed-4cd7aff5-25ff-4491-b7b0-a079248d54f4. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1401.855089] env[68285]: DEBUG oslo_concurrency.lockutils [req-662dea87-6dd0-4d63-a937-902a1b12e711 req-5aaeb310-88ea-4530-9246-19c63e06bf2e service nova] Acquiring lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.855394] env[68285]: DEBUG oslo_concurrency.lockutils [req-662dea87-6dd0-4d63-a937-902a1b12e711 req-5aaeb310-88ea-4530-9246-19c63e06bf2e service nova] Acquired lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1401.855623] env[68285]: DEBUG nova.network.neutron [req-662dea87-6dd0-4d63-a937-902a1b12e711 req-5aaeb310-88ea-4530-9246-19c63e06bf2e service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Refreshing network info cache for port 4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1402.158208] env[68285]: DEBUG oslo_concurrency.lockutils [None req-109c8ef6-f050-4988-8287-757af803b46d tempest-ServerRescueNegativeTestJSON-696928139 tempest-ServerRescueNegativeTestJSON-696928139-project-member] Lock "801f524e-28b5-4452-b880-0fc30d3c5eef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.637s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1402.299638] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892699, 'name': CreateVM_Task, 'duration_secs': 0.325318} completed successfully. 
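Annotation: the "Acquiring lock" / "acquired" / "released" lines throughout this section come from oslo.concurrency's lockutils helpers, which record how long each caller waited for and then held a named lock (the "waited N.NNNs" / "held N.NNNs" figures). A small sketch of both forms, with lock names copied from the log and placeholder bodies:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # placeholder body; the waited/held timings in the log are measured
    # around calls like this one
    pass

# Equivalent context-manager form, as used for the refresh_cache-* and
# image-cache locks seen above:
with lockutils.lock('refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b'):
    pass  # critical section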
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.299817] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1402.300501] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.300668] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1402.300986] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1402.301889] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffe87ef7-c912-4724-b9a0-8ac065b5aafe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.305560] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1402.305560] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e8b6d9-fcc7-f313-d6e9-8ba8b0b73eaf" [ 1402.305560] env[68285]: _type = "Task" [ 1402.305560] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.313380] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e8b6d9-fcc7-f313-d6e9-8ba8b0b73eaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.508347] env[68285]: DEBUG nova.compute.manager [req-f843f051-b834-432e-b4c2-96bbb9030ddf req-3db88536-3c0b-479b-97e5-919e275e9f06 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Received event network-changed-6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1402.508645] env[68285]: DEBUG nova.compute.manager [req-f843f051-b834-432e-b4c2-96bbb9030ddf req-3db88536-3c0b-479b-97e5-919e275e9f06 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Refreshing instance network info cache due to event network-changed-6228c66e-e1b1-4b17-bdb2-ae945380a77a. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1402.508772] env[68285]: DEBUG oslo_concurrency.lockutils [req-f843f051-b834-432e-b4c2-96bbb9030ddf req-3db88536-3c0b-479b-97e5-919e275e9f06 service nova] Acquiring lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.508933] env[68285]: DEBUG oslo_concurrency.lockutils [req-f843f051-b834-432e-b4c2-96bbb9030ddf req-3db88536-3c0b-479b-97e5-919e275e9f06 service nova] Acquired lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1402.509113] env[68285]: DEBUG nova.network.neutron [req-f843f051-b834-432e-b4c2-96bbb9030ddf req-3db88536-3c0b-479b-97e5-919e275e9f06 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Refreshing network info cache for port 6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1402.593788] env[68285]: DEBUG nova.network.neutron [req-662dea87-6dd0-4d63-a937-902a1b12e711 req-5aaeb310-88ea-4530-9246-19c63e06bf2e service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updated VIF entry in instance network info cache for port 4cd7aff5-25ff-4491-b7b0-a079248d54f4. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1402.594190] env[68285]: DEBUG nova.network.neutron [req-662dea87-6dd0-4d63-a937-902a1b12e711 req-5aaeb310-88ea-4530-9246-19c63e06bf2e service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updating instance_info_cache with network_info: [{"id": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "address": "fa:16:3e:6b:0f:c0", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cd7aff5-25", "ovs_interfaceid": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1402.816257] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e8b6d9-fcc7-f313-d6e9-8ba8b0b73eaf, 'name': SearchDatastore_Task, 'duration_secs': 0.010767} completed successfully. 
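Annotation: the instance_info_cache updates above carry the full Neutron network_info structure, one dict per VIF, each holding subnets, fixed IPs and any floating IPs. A short illustrative helper for pulling addresses out of one such entry (the helper name is arbitrary):

def addresses(vif):
    # `vif` is one element of the network_info list shown in the cache update.
    fixed, floating = [], []
    for subnet in vif['network']['subnets']:
        for ip in subnet['ips']:
            fixed.append(ip['address'])
            floating.extend(f['address'] for f in ip['floating_ips'])
    return fixed, floating

# For the port 4cd7aff5-... entry above this yields
# (['192.168.128.3'], ['10.180.180.158']).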
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.816534] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1402.816772] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1402.817022] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.817171] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1402.817443] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1402.817705] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19946b77-8cba-400f-9212-c21d03528dc0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.825885] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1402.826049] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1402.826726] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf5bd26f-c391-42aa-87f0-e82541ea39fc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.831757] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1402.831757] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52021280-2ecc-b919-c328-b0fd57b28428" [ 1402.831757] env[68285]: _type = "Task" [ 1402.831757] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.839127] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52021280-2ecc-b919-c328-b0fd57b28428, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.990671] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1402.992176] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1402.992176] env[68285]: DEBUG nova.compute.manager [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Going to confirm migration 9 {{(pid=68285) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1403.097320] env[68285]: DEBUG oslo_concurrency.lockutils [req-662dea87-6dd0-4d63-a937-902a1b12e711 req-5aaeb310-88ea-4530-9246-19c63e06bf2e service nova] Releasing lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1403.318639] env[68285]: DEBUG nova.network.neutron [req-f843f051-b834-432e-b4c2-96bbb9030ddf req-3db88536-3c0b-479b-97e5-919e275e9f06 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updated VIF entry in instance network info cache for port 6228c66e-e1b1-4b17-bdb2-ae945380a77a. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1403.319043] env[68285]: DEBUG nova.network.neutron [req-f843f051-b834-432e-b4c2-96bbb9030ddf req-3db88536-3c0b-479b-97e5-919e275e9f06 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updating instance_info_cache with network_info: [{"id": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "address": "fa:16:3e:6d:6f:ad", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6228c66e-e1", "ovs_interfaceid": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.342576] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52021280-2ecc-b919-c328-b0fd57b28428, 'name': SearchDatastore_Task, 'duration_secs': 0.008337} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.343398] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5710cf5b-c6cb-412a-b275-fc4ef1a27986 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.349313] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1403.349313] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52c31011-ed6f-30b3-b948-ce17fedead07" [ 1403.349313] env[68285]: _type = "Task" [ 1403.349313] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.357094] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c31011-ed6f-30b3-b948-ce17fedead07, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.575131] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.575415] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1403.575507] env[68285]: DEBUG nova.network.neutron [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1403.575754] env[68285]: DEBUG nova.objects.instance [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lazy-loading 'info_cache' on Instance uuid d1446290-95ce-4e87-85df-7cc69bb57ce7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1403.822329] env[68285]: DEBUG oslo_concurrency.lockutils [req-f843f051-b834-432e-b4c2-96bbb9030ddf req-3db88536-3c0b-479b-97e5-919e275e9f06 service nova] Releasing lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1403.860445] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52c31011-ed6f-30b3-b948-ce17fedead07, 'name': SearchDatastore_Task, 'duration_secs': 0.040334} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.860681] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1403.860941] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b/f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1403.861227] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c93b2ce-d52d-4d9b-b44d-b69d17340359 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.867851] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1403.867851] env[68285]: value = "task-2892700" [ 1403.867851] env[68285]: _type = "Task" [ 1403.867851] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.875788] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892700, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.379719] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892700, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.879856] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892700, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.884707} completed successfully. 
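Annotation: the CopyVirtualDisk_Task above moves the cached image VMDK from devstack-image-cache_base into the instance's own folder; both ends use the "[datastore] folder/file.vmdk" path convention visible in the log. A tiny illustrative helper that reproduces the two paths (helper name is made up):

def ds_path(datastore, *parts):
    # "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk" style paths
    return '[%s] %s' % (datastore, '/'.join(parts))

image_id = 'ce84ab4c-9913-42dc-b839-714ad2184867'
instance_uuid = 'f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b'
src = ds_path('datastore1', 'devstack-image-cache_base', image_id, image_id + '.vmdk')
dst = ds_path('datastore1', instance_uuid, instance_uuid + '.vmdk')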
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.880578] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b/f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1404.880978] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1404.881377] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-630fb7e9-0a15-4187-99c7-8dac05958e26 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.891049] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1404.891049] env[68285]: value = "task-2892701" [ 1404.891049] env[68285]: _type = "Task" [ 1404.891049] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.898530] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892701, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.923502] env[68285]: DEBUG nova.network.neutron [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance_info_cache with network_info: [{"id": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "address": "fa:16:3e:71:fc:5c", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbe7fd71-a3", "ovs_interfaceid": "dbe7fd71-a38e-450c-a4ef-497eaf455ff0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.398403] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquiring lock "f8533809-ac64-4a1a-8fa8-45648110932d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1405.398658] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Lock "f8533809-ac64-4a1a-8fa8-45648110932d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1405.404478] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892701, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.360171} completed successfully. 
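Annotation: "Extending root virtual disk to 1048576" is the flavor's root_gb expressed in KiB; the m1.nano flavor dumped earlier in this section has root_gb=1, and 1 GiB is 1024 * 1024 KiB.

root_gb = 1                           # m1.nano root_gb from the flavor dump above
new_capacity_kb = root_gb * 1024 * 1024
assert new_capacity_kb == 1048576     # matches the figure logged by ExtendVirtualDisk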
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.404478] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1405.404906] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c31b807-5864-4a0f-814f-9a8ce866b466 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.428400] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b/f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1405.429543] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-d1446290-95ce-4e87-85df-7cc69bb57ce7" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1405.429811] env[68285]: DEBUG nova.objects.instance [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lazy-loading 'migration_context' on Instance uuid d1446290-95ce-4e87-85df-7cc69bb57ce7 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1405.431100] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4e64268-ef2f-4642-b235-9493db09db76 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.446630] env[68285]: DEBUG nova.objects.base [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1405.447812] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3acc44c-89fc-45c3-a181-e96722a79e6b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.469300] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77be056c-57eb-46e8-ad84-12efdc5ef028 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.471770] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1405.471770] env[68285]: value = "task-2892702" [ 1405.471770] env[68285]: _type = "Task" [ 1405.471770] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.476543] env[68285]: DEBUG oslo_vmware.api [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1405.476543] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d320b2-e38d-814f-25a4-084e320f0830" [ 1405.476543] env[68285]: _type = "Task" [ 1405.476543] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.483285] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892702, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.488595] env[68285]: DEBUG oslo_vmware.api [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d320b2-e38d-814f-25a4-084e320f0830, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.904433] env[68285]: DEBUG nova.compute.manager [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1405.983684] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892702, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.988628] env[68285]: DEBUG oslo_vmware.api [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d320b2-e38d-814f-25a4-084e320f0830, 'name': SearchDatastore_Task, 'duration_secs': 0.012314} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.988945] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1405.989902] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1406.429732] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1406.482433] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892702, 'name': ReconfigVM_Task, 'duration_secs': 0.816886} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.482703] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Reconfigured VM instance instance-0000007a to attach disk [datastore1] f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b/f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1406.483371] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6622445d-f0a7-4c71-8d16-cc841f06cae5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.489240] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1406.489240] env[68285]: value = "task-2892703" [ 1406.489240] env[68285]: _type = "Task" [ 1406.489240] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.498646] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892703, 'name': Rename_Task} progress is 5%. 
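The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" triplets around "compute_resources" come from oslo_concurrency's lockutils primitives. A minimal sketch of the same pattern, with a hypothetical lock name and body (Nova itself reaches it through wrappers such as utils.synchronized):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(do_claim):
    # Runs with the named lock held; the log records how long each caller
    # waited for the lock and how long it was held.
    return do_claim()

# The same lock can also be taken explicitly as a context manager:
with lockutils.lock('compute_resources'):
    pass  # critical section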
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.560509] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4ed0a9-e9d7-4c06-a075-c38c5cdfd02f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.567735] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfcd9bae-9469-41d9-8586-105a47e0ef38 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.598144] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff500f59-a50a-4eca-89c8-03e4b929db21 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.606813] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635852c0-06d7-4602-9fd5-17843b722ec7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.623134] env[68285]: DEBUG nova.compute.provider_tree [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1407.000078] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892703, 'name': Rename_Task, 'duration_secs': 0.154251} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.000357] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1407.000610] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6df7addc-b0f9-4167-9e7b-1436fc79ad87 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.006879] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1407.006879] env[68285]: value = "task-2892704" [ 1407.006879] env[68285]: _type = "Task" [ 1407.006879] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.015746] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892704, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.126440] env[68285]: DEBUG nova.scheduler.client.report [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1407.517957] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892704, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.016837] env[68285]: DEBUG oslo_vmware.api [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892704, 'name': PowerOnVM_Task, 'duration_secs': 0.581582} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.017206] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1408.017309] env[68285]: INFO nova.compute.manager [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Took 7.71 seconds to spawn the instance on the hypervisor. 
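The provider inventory reported above translates into schedulable capacity with the standard Placement formula (total - reserved) * allocation_ratio. A worked example using the numbers from this log:

# Capacity per resource class for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0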
[ 1408.017490] env[68285]: DEBUG nova.compute.manager [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1408.018274] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feeb7ac8-6e9b-41c5-b910-5db5fc2234d0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.140659] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.151s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1408.143450] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.714s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1408.144882] env[68285]: INFO nova.compute.claims [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1408.536187] env[68285]: INFO nova.compute.manager [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Took 12.51 seconds to build instance. 
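At this point the spawn of f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b is complete. The vSphere tasks it chained together, in the order they appear above, are summarized in this annotated outline (descriptions only, not Nova's code):

SPAWN_STEPS = [
    ('CopyVirtualDisk_Task',   'copy the cached image VMDK into the instance directory'),
    ('ExtendVirtualDisk_Task', 'grow the root disk to the flavor size'),
    ('ReconfigVM_Task',        'attach the root disk to the VM'),
    ('Rename_Task',            'rename the VM to its final name'),
    ('PowerOnVM_Task',         'power the VM on'),
]

def describe_spawn():
    for task_name, description in SPAWN_STEPS:
        print(f'{task_name}: {description}')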
[ 1408.703673] env[68285]: INFO nova.scheduler.client.report [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleted allocation for migration 9afec92a-7c47-455f-83f4-e7e9fb939454 [ 1408.866663] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.038519] env[68285]: DEBUG oslo_concurrency.lockutils [None req-3aa22f21-b60c-4e3b-805b-a7e5aac25fa8 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.026s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1409.208960] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b747c2d9-eb3f-4e9c-8b20-9c256c528d70 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.218s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1409.216837] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b0eeea-161d-49e2-ac12-ba40e4410068 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.225280] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14cb77d4-b08b-41ad-b628-254abba93821 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.256140] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbe058d-1d12-40cc-9fa9-dcaa05ce4f97 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.263845] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763aced2-7e96-46d8-9366-4337ae165738 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.277778] env[68285]: DEBUG nova.compute.provider_tree [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1409.674250] env[68285]: DEBUG nova.compute.manager [req-4e6a224d-7ebe-4a35-bead-beadb879cbc0 req-d1c177b4-16ca-4728-907b-08b2e9c2afb3 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Received event network-changed-6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1409.674450] env[68285]: DEBUG nova.compute.manager [req-4e6a224d-7ebe-4a35-bead-beadb879cbc0 req-d1c177b4-16ca-4728-907b-08b2e9c2afb3 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Refreshing instance 
network info cache due to event network-changed-6228c66e-e1b1-4b17-bdb2-ae945380a77a. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1409.674661] env[68285]: DEBUG oslo_concurrency.lockutils [req-4e6a224d-7ebe-4a35-bead-beadb879cbc0 req-d1c177b4-16ca-4728-907b-08b2e9c2afb3 service nova] Acquiring lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.674804] env[68285]: DEBUG oslo_concurrency.lockutils [req-4e6a224d-7ebe-4a35-bead-beadb879cbc0 req-d1c177b4-16ca-4728-907b-08b2e9c2afb3 service nova] Acquired lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1409.674967] env[68285]: DEBUG nova.network.neutron [req-4e6a224d-7ebe-4a35-bead-beadb879cbc0 req-d1c177b4-16ca-4728-907b-08b2e9c2afb3 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Refreshing network info cache for port 6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1409.783314] env[68285]: DEBUG nova.scheduler.client.report [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1409.866535] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1410.116740] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1410.117148] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1410.117375] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "d1446290-95ce-4e87-85df-7cc69bb57ce7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1410.117608] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1410.117807] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1410.119966] env[68285]: INFO nova.compute.manager [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Terminating instance [ 1410.286585] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.143s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1410.287136] env[68285]: DEBUG nova.compute.manager [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1410.600315] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "005f8c9a-8327-4c60-a016-0460ca42f65f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1410.600587] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "005f8c9a-8327-4c60-a016-0460ca42f65f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1410.602170] env[68285]: DEBUG nova.network.neutron [req-4e6a224d-7ebe-4a35-bead-beadb879cbc0 req-d1c177b4-16ca-4728-907b-08b2e9c2afb3 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updated VIF entry in instance network info cache for port 6228c66e-e1b1-4b17-bdb2-ae945380a77a. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1410.602487] env[68285]: DEBUG nova.network.neutron [req-4e6a224d-7ebe-4a35-bead-beadb879cbc0 req-d1c177b4-16ca-4728-907b-08b2e9c2afb3 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updating instance_info_cache with network_info: [{"id": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "address": "fa:16:3e:6d:6f:ad", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6228c66e-e1", "ovs_interfaceid": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1410.623949] env[68285]: DEBUG nova.compute.manager [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Start destroying the instance on the hypervisor. 
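The network-changed-6228c66e... event is serviced under the per-instance "refresh_cache-<uuid>" lock seen above: acquire the lock, rebuild the instance_info_cache from Neutron, release. A minimal sketch of that pattern, assuming hypothetical get_instance and network_api helpers standing in for Nova's compute manager and nova.network.neutron.API:

from oslo_concurrency import lockutils

def handle_network_changed(context, instance_uuid, network_api, get_instance):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        instance = get_instance(context, instance_uuid)
        # Rebuilds instance_info_cache entries like the VIF dict shown above.
        network_api.get_instance_nw_info(context, instance)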
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1410.624178] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1410.625080] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c1f1f3-02f6-490b-8904-46a3521bada0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.632784] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1410.633239] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1de10d2b-fc72-450e-b408-e41538b61e8b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.639453] env[68285]: DEBUG oslo_vmware.api [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1410.639453] env[68285]: value = "task-2892705" [ 1410.639453] env[68285]: _type = "Task" [ 1410.639453] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.649571] env[68285]: DEBUG oslo_vmware.api [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892705, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.791643] env[68285]: DEBUG nova.compute.utils [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1410.793158] env[68285]: DEBUG nova.compute.manager [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Not allocating networking since 'none' was specified. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1411.104898] env[68285]: DEBUG nova.compute.manager [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1411.107755] env[68285]: DEBUG oslo_concurrency.lockutils [req-4e6a224d-7ebe-4a35-bead-beadb879cbc0 req-d1c177b4-16ca-4728-907b-08b2e9c2afb3 service nova] Releasing lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1411.149274] env[68285]: DEBUG oslo_vmware.api [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892705, 'name': PowerOffVM_Task, 'duration_secs': 0.246258} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.151749] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1411.151749] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1411.151749] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-447ce929-229b-44b2-bf59-236bf654655b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.210963] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1411.211347] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1411.211558] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleting the datastore file [datastore1] d1446290-95ce-4e87-85df-7cc69bb57ce7 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1411.211832] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1af6789-9daa-4809-ba20-bd9260469725 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.217786] env[68285]: DEBUG oslo_vmware.api [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1411.217786] env[68285]: value = "task-2892707" [ 1411.217786] env[68285]: _type = "Task" [ 1411.217786] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.225576] env[68285]: DEBUG oslo_vmware.api [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892707, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.294974] env[68285]: DEBUG nova.compute.manager [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1411.627835] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1411.628125] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1411.629610] env[68285]: INFO nova.compute.claims [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1411.727363] env[68285]: DEBUG oslo_vmware.api [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892707, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.384488} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.727626] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1411.727904] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1411.727990] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1411.728190] env[68285]: INFO nova.compute.manager [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1411.728439] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1411.728636] env[68285]: DEBUG nova.compute.manager [-] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1411.728736] env[68285]: DEBUG nova.network.neutron [-] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1412.171726] env[68285]: DEBUG nova.compute.manager [req-198175b3-9362-48b9-9051-8432612bd32a req-5b73a9ea-e56d-423d-8879-87b8bf9c60c9 service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Received event network-vif-deleted-dbe7fd71-a38e-450c-a4ef-497eaf455ff0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1412.171726] env[68285]: INFO nova.compute.manager [req-198175b3-9362-48b9-9051-8432612bd32a req-5b73a9ea-e56d-423d-8879-87b8bf9c60c9 service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Neutron deleted interface dbe7fd71-a38e-450c-a4ef-497eaf455ff0; detaching it from the instance and deleting it from the info cache [ 1412.171726] env[68285]: DEBUG nova.network.neutron [req-198175b3-9362-48b9-9051-8432612bd32a req-5b73a9ea-e56d-423d-8879-87b8bf9c60c9 service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.303753] env[68285]: DEBUG nova.compute.manager [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Start spawning the instance on the hypervisor. 
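The teardown of d1446290-95ce-4e87-85df-7cc69bb57ce7 follows the power-off / unregister / delete-files / deallocate-network order recorded above. A condensed sketch of the hypervisor side, assuming an oslo_vmware session and already-resolved references; the argument shapes are simplified and are not the exact nova.virt.vmwareapi signatures:

def destroy_vm(session, vm_ref, file_manager, datacenter_ref, instance_dir):
    power_off = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(power_off)           # PowerOffVM_Task
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    delete = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name=instance_dir, datacenter=datacenter_ref)
    session.wait_for_task(delete)              # DeleteDatastoreFile_Task
    # The compute manager then deallocates the Neutron ports, which is the
    # "Deallocating network for instance" step that follows in the log.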
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1412.329773] env[68285]: DEBUG nova.virt.hardware [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1412.330009] env[68285]: DEBUG nova.virt.hardware [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1412.330173] env[68285]: DEBUG nova.virt.hardware [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1412.330360] env[68285]: DEBUG nova.virt.hardware [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1412.330506] env[68285]: DEBUG nova.virt.hardware [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1412.330648] env[68285]: DEBUG nova.virt.hardware [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1412.330852] env[68285]: DEBUG nova.virt.hardware [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1412.331013] env[68285]: DEBUG nova.virt.hardware [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1412.331180] env[68285]: DEBUG 
nova.virt.hardware [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1412.331354] env[68285]: DEBUG nova.virt.hardware [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1412.331552] env[68285]: DEBUG nova.virt.hardware [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1412.332615] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c014111-bbca-499a-afa0-f6123bf38757 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.340915] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ad2b43-19d7-4ecf-8268-c9a3e77c3c5e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.356536] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1412.364029] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Creating folder: Project (a56e719e85d14b53b2efe6e5aa1a29c9). Parent ref: group-v580775. {{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1412.364029] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d7e8605-ce29-4659-a0b8-d04620e6eef2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.375020] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Created folder: Project (a56e719e85d14b53b2efe6e5aa1a29c9) in parent group-v580775. [ 1412.375020] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Creating folder: Instances. Parent ref: group-v581105. 
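The "Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies" lines above come from enumerating (sockets, cores, threads) combinations whose product equals the vCPU count, capped by the 65536 limits. A toy version of that enumeration, mirroring the idea in nova.virt.hardware rather than its exact algorithm:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // sockets // cores
            if threads <= max_threads:
                topos.append((sockets, cores, threads))
    return topos

# possible_topologies(1) -> [(1, 1, 1)], matching the single topology logged above.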
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1412.375020] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-754460d0-b9a6-46df-9afd-8bb3e26a108b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.385990] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Created folder: Instances in parent group-v581105. [ 1412.386270] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1412.386489] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1412.386696] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-adda8a80-67c8-4c27-b2ae-4769b4287dae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.404048] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1412.404048] env[68285]: value = "task-2892710" [ 1412.404048] env[68285]: _type = "Task" [ 1412.404048] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.411472] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892710, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.652652] env[68285]: DEBUG nova.network.neutron [-] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.676348] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-126abbce-b938-422f-a828-8b211843cf70 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.686193] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13600f2-25a8-44c9-bcd5-0989719622d8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.715255] env[68285]: DEBUG nova.compute.manager [req-198175b3-9362-48b9-9051-8432612bd32a req-5b73a9ea-e56d-423d-8879-87b8bf9c60c9 service nova] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Detach interface failed, port_id=dbe7fd71-a38e-450c-a4ef-497eaf455ff0, reason: Instance d1446290-95ce-4e87-85df-7cc69bb57ce7 could not be found. 
{{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1412.743176] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa2f9a2-0a65-4c8b-8694-a2756d5ce181 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.749983] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af183b6-818f-4630-b8a8-b83268e9695c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.781218] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a816a7-947b-42c3-96c6-44b8550e4b8e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.789271] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feba219e-fed7-4b1c-bf58-ffff1b722731 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.807122] env[68285]: DEBUG nova.compute.provider_tree [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1412.861379] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.914057] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892710, 'name': CreateVM_Task, 'duration_secs': 0.49594} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.914263] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1412.914769] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.914945] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1412.915286] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1412.915545] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7fa6300-e4b1-433a-850a-9de607c6f189 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.920691] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1412.920691] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f84729-f670-e67b-5007-c0a347597ce3" [ 1412.920691] env[68285]: _type = "Task" [ 1412.920691] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.928743] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f84729-f670-e67b-5007-c0a347597ce3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.155395] env[68285]: INFO nova.compute.manager [-] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Took 1.43 seconds to deallocate network for instance. 
[ 1413.310463] env[68285]: DEBUG nova.scheduler.client.report [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1413.366522] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.432036] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f84729-f670-e67b-5007-c0a347597ce3, 'name': SearchDatastore_Task, 'duration_secs': 0.010589} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.432036] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1413.432036] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1413.432036] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.432356] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1413.432356] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1413.432601] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d403cd8c-1e5a-46b9-b61f-9aa8dcfc4a8b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.440908] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1413.441106] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1413.441799] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf78a118-8588-4f75-b849-9f2c1050396d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.447041] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1413.447041] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b3241e-be70-5ce3-4acf-d02852d97a9f" [ 1413.447041] env[68285]: _type = "Task" [ 1413.447041] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.453887] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b3241e-be70-5ce3-4acf-d02852d97a9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.661462] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1413.815961] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.188s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1413.816515] env[68285]: DEBUG nova.compute.manager [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1413.819236] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.158s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1413.819441] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1413.839025] env[68285]: INFO nova.scheduler.client.report [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleted allocations for instance d1446290-95ce-4e87-85df-7cc69bb57ce7 [ 1413.959180] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b3241e-be70-5ce3-4acf-d02852d97a9f, 'name': SearchDatastore_Task, 'duration_secs': 0.008561} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.960011] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b322787-6304-4899-b7f8-a8a17e19a783 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.965536] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1413.965536] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5253cd94-a9fd-2162-ac40-b01d6259a9d9" [ 1413.965536] env[68285]: _type = "Task" [ 1413.965536] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.973565] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5253cd94-a9fd-2162-ac40-b01d6259a9d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.323212] env[68285]: DEBUG nova.compute.utils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1414.324730] env[68285]: DEBUG nova.compute.manager [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1414.324907] env[68285]: DEBUG nova.network.neutron [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1414.346874] env[68285]: DEBUG oslo_concurrency.lockutils [None req-68b3dde4-89b8-4ccc-b742-68695a132dcb tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "d1446290-95ce-4e87-85df-7cc69bb57ce7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.230s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1414.367458] env[68285]: DEBUG nova.policy [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '494447cb560a41dd9a3118745ac60554', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75a6837bced940cdaf5743b8e94cce29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1414.369252] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.476126] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5253cd94-a9fd-2162-ac40-b01d6259a9d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009993} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.476420] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1414.476676] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] f8533809-ac64-4a1a-8fa8-45648110932d/f8533809-ac64-4a1a-8fa8-45648110932d.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1414.476942] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-23ea329d-5de8-4b7b-8b39-3aa808010c8b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.483487] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1414.483487] env[68285]: value = "task-2892711" [ 1414.483487] env[68285]: _type = "Task" [ 1414.483487] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.492759] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892711, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.703384] env[68285]: DEBUG nova.network.neutron [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Successfully created port: 379dbcb8-f7be-4c47-87de-5f6c87635d90 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1414.828848] env[68285]: DEBUG nova.compute.manager [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1414.866432] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.866757] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.993719] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892711, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503451} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.993982] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] f8533809-ac64-4a1a-8fa8-45648110932d/f8533809-ac64-4a1a-8fa8-45648110932d.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1414.994215] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1414.994498] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c41f23d1-13c7-41f6-838f-df95189b21e8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.000917] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1415.000917] env[68285]: value = "task-2892712" [ 1415.000917] env[68285]: _type = "Task" [ 1415.000917] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.008842] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892712, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.043405] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1415.043648] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1415.510919] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892712, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066793} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.511304] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1415.512211] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b6bc61-1ba2-45af-bc4c-faf944c6b5c1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.531268] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] f8533809-ac64-4a1a-8fa8-45648110932d/f8533809-ac64-4a1a-8fa8-45648110932d.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1415.531478] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3510e5b2-8166-4f30-9675-6c9bde62b011 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.545566] env[68285]: DEBUG nova.compute.manager [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1415.550573] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1415.550573] env[68285]: value = "task-2892713" [ 1415.550573] env[68285]: _type = "Task" [ 1415.550573] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.557947] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892713, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.839059] env[68285]: DEBUG nova.compute.manager [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1415.867172] env[68285]: DEBUG nova.virt.hardware [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1415.867475] env[68285]: DEBUG nova.virt.hardware [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1415.867672] env[68285]: DEBUG nova.virt.hardware [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1415.867892] env[68285]: DEBUG nova.virt.hardware [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1415.868108] env[68285]: DEBUG nova.virt.hardware [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image pref 0:0:0 {{(pid=68285) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1415.868306] env[68285]: DEBUG nova.virt.hardware [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1415.868566] env[68285]: DEBUG nova.virt.hardware [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1415.868751] env[68285]: DEBUG nova.virt.hardware [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1415.868956] env[68285]: DEBUG nova.virt.hardware [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1415.869210] env[68285]: DEBUG nova.virt.hardware [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1415.869461] env[68285]: DEBUG nova.virt.hardware [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1415.869917] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1415.870103] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68285) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1415.870926] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fe884e-4c6c-4548-b78d-c907e0dc8d12 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.873569] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1415.880552] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02765a21-8c4b-45ba-973a-e3e772348417 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.062903] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892713, 'name': ReconfigVM_Task, 'duration_secs': 0.353228} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.063192] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Reconfigured VM instance instance-0000007b to attach disk [datastore1] f8533809-ac64-4a1a-8fa8-45648110932d/f8533809-ac64-4a1a-8fa8-45648110932d.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1416.063905] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c8f7dd3e-4eb9-4ff2-9940-8507b8fec831 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.070325] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1416.070325] env[68285]: value = "task-2892714" [ 1416.070325] env[68285]: _type = "Task" [ 1416.070325] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.071313] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1416.071713] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1416.073574] env[68285]: INFO nova.compute.claims [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1416.085199] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892714, 'name': Rename_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.087338] env[68285]: DEBUG nova.compute.manager [req-da991dcd-f5f0-48bd-a30b-4456633b90ce req-38d47c46-d003-4ff8-9756-b22fe0165d7d service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Received event network-vif-plugged-379dbcb8-f7be-4c47-87de-5f6c87635d90 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1416.087540] env[68285]: DEBUG oslo_concurrency.lockutils [req-da991dcd-f5f0-48bd-a30b-4456633b90ce req-38d47c46-d003-4ff8-9756-b22fe0165d7d service nova] Acquiring lock "005f8c9a-8327-4c60-a016-0460ca42f65f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1416.087796] env[68285]: DEBUG oslo_concurrency.lockutils [req-da991dcd-f5f0-48bd-a30b-4456633b90ce req-38d47c46-d003-4ff8-9756-b22fe0165d7d service nova] Lock "005f8c9a-8327-4c60-a016-0460ca42f65f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1416.087908] env[68285]: DEBUG oslo_concurrency.lockutils [req-da991dcd-f5f0-48bd-a30b-4456633b90ce req-38d47c46-d003-4ff8-9756-b22fe0165d7d service nova] Lock "005f8c9a-8327-4c60-a016-0460ca42f65f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1416.088146] env[68285]: DEBUG nova.compute.manager [req-da991dcd-f5f0-48bd-a30b-4456633b90ce req-38d47c46-d003-4ff8-9756-b22fe0165d7d service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] No waiting events found dispatching network-vif-plugged-379dbcb8-f7be-4c47-87de-5f6c87635d90 {{(pid=68285) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1416.088324] env[68285]: WARNING nova.compute.manager [req-da991dcd-f5f0-48bd-a30b-4456633b90ce req-38d47c46-d003-4ff8-9756-b22fe0165d7d service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Received unexpected event network-vif-plugged-379dbcb8-f7be-4c47-87de-5f6c87635d90 for instance with vm_state building and task_state spawning. [ 1416.174879] env[68285]: DEBUG nova.network.neutron [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Successfully updated port: 379dbcb8-f7be-4c47-87de-5f6c87635d90 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1416.377677] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1416.588040] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892714, 'name': Rename_Task, 'duration_secs': 0.140043} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.588329] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1416.588493] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bba6c913-a691-44d8-929e-93638d1e0221 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.594556] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1416.594556] env[68285]: value = "task-2892715" [ 1416.594556] env[68285]: _type = "Task" [ 1416.594556] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.602017] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892715, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.677372] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.677535] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1416.677657] env[68285]: DEBUG nova.network.neutron [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1417.104035] env[68285]: DEBUG oslo_vmware.api [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892715, 'name': PowerOnVM_Task, 'duration_secs': 0.424227} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.106496] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1417.106709] env[68285]: INFO nova.compute.manager [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Took 4.80 seconds to spawn the instance on the hypervisor. 
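Read end to end, the entries for instance f8533809-ac64-4a1a-8fa8-45648110932d trace the vmwareapi spawn path: find the image in the datastore cache, copy the cached VMDK into the instance directory, extend the root disk to the flavor size (1048576 KB here, i.e. the 1 GB root disk), reconfigure the VM to attach the disk, rename it, and power it on, each step being a vCenter task that is awaited before the next. A condensed sketch of that ordering, where run_task and its keyword arguments are placeholders for the real code in nova/virt/vmwareapi/vm_util.py, vmops.py and volumeops.py referenced above:

```python
def spawn_from_image_cache(session, instance_uuid, image_id, datastore, root_gb):
    """Illustrative ordering only; every call below stands for the matching
    vCenter task in the log (CopyVirtualDisk_Task, ExtendVirtualDisk_Task,
    ReconfigVM_Task, Rename_Task, PowerOnVM_Task)."""
    cache_vmdk = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    instance_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    run_task(session, "CopyVirtualDisk_Task", source=cache_vmdk, dest=instance_vmdk)
    run_task(session, "ExtendVirtualDisk_Task", disk=instance_vmdk,
             new_size_kb=root_gb * 1024 * 1024)   # 1 GB -> 1048576 KB, as logged
    run_task(session, "ReconfigVM_Task", vm=instance_uuid, attach_disk=instance_vmdk)
    run_task(session, "Rename_Task", vm=instance_uuid, new_name=instance_uuid)
    run_task(session, "PowerOnVM_Task", vm=instance_uuid)


def run_task(session, name, **kwargs):
    """Placeholder: submit the named vCenter task and block until it
    completes, as in the wait_for_task sketch earlier."""
    raise NotImplementedError
```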
[ 1417.106888] env[68285]: DEBUG nova.compute.manager [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1417.108015] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e6f671-9916-4673-83cb-108e48067a84 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.158359] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d98702-fcbe-44c7-98ea-3d6f9420d0c9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.165932] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66333b63-15ec-45f3-be65-3b0025fe0536 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.198606] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52af983a-753f-42ba-84d3-e4c681d00508 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.206723] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed91274-8744-42c5-ae66-25af2a62eeb6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.220573] env[68285]: DEBUG nova.compute.provider_tree [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1417.230370] env[68285]: DEBUG nova.network.neutron [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1417.358053] env[68285]: DEBUG nova.network.neutron [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updating instance_info_cache with network_info: [{"id": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "address": "fa:16:3e:32:ee:cc", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap379dbcb8-f7", "ovs_interfaceid": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.623586] env[68285]: INFO nova.compute.manager [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Took 11.21 seconds to build instance. 
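The instance_info_cache update above carries the full neutron network_info for port 379dbcb8-f7be-4c47-87de-5f6c87635d90; the entries that follow show the driver reducing it to the much smaller 'Instance VIF info' dict (network name, MAC, OpaqueNetwork reference, iface id, vif model) used to build the VM. A rough sketch of that reduction, assuming the dict shapes exactly as they appear in the log and not the actual nova implementation:

```python
def vif_info_from_network_info(vif, vif_model="vmxnet3"):
    """Collapse one neutron-style VIF entry (as logged above) into the
    minimal dict the vmwareapi VM build needs. Field names follow the
    'Instance VIF info' entry in the trace; the function itself is
    illustrative."""
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],        # 'br-int'
        "mac_address": vif["address"],                    # 'fa:16:3e:32:ee:cc'
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }
```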
[ 1417.724325] env[68285]: DEBUG nova.scheduler.client.report [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1417.860847] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1417.861259] env[68285]: DEBUG nova.compute.manager [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Instance network_info: |[{"id": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "address": "fa:16:3e:32:ee:cc", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap379dbcb8-f7", "ovs_interfaceid": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1417.861721] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:ee:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '379dbcb8-f7be-4c47-87de-5f6c87635d90', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1417.869432] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 
tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1417.869981] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1417.870244] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0be0b4f-24b6-4b92-aadf-f811fc6839a2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.890415] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1417.890415] env[68285]: value = "task-2892716" [ 1417.890415] env[68285]: _type = "Task" [ 1417.890415] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.898163] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892716, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.114941] env[68285]: DEBUG nova.compute.manager [req-c5d75406-9d63-492f-91e5-3a99bb9769bc req-fcfff3fc-9430-4081-8e83-62c8b5a75755 service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Received event network-changed-379dbcb8-f7be-4c47-87de-5f6c87635d90 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1418.115159] env[68285]: DEBUG nova.compute.manager [req-c5d75406-9d63-492f-91e5-3a99bb9769bc req-fcfff3fc-9430-4081-8e83-62c8b5a75755 service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Refreshing instance network info cache due to event network-changed-379dbcb8-f7be-4c47-87de-5f6c87635d90. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1418.115376] env[68285]: DEBUG oslo_concurrency.lockutils [req-c5d75406-9d63-492f-91e5-3a99bb9769bc req-fcfff3fc-9430-4081-8e83-62c8b5a75755 service nova] Acquiring lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.115520] env[68285]: DEBUG oslo_concurrency.lockutils [req-c5d75406-9d63-492f-91e5-3a99bb9769bc req-fcfff3fc-9430-4081-8e83-62c8b5a75755 service nova] Acquired lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1418.115709] env[68285]: DEBUG nova.network.neutron [req-c5d75406-9d63-492f-91e5-3a99bb9769bc req-fcfff3fc-9430-4081-8e83-62c8b5a75755 service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Refreshing network info cache for port 379dbcb8-f7be-4c47-87de-5f6c87635d90 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1418.126366] env[68285]: DEBUG oslo_concurrency.lockutils [None req-25e14a0e-7d45-4f63-b988-abb6014f08bb tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Lock "f8533809-ac64-4a1a-8fa8-45648110932d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.728s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1418.229020] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.157s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1418.229547] env[68285]: DEBUG nova.compute.manager [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1418.232158] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.855s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1418.232281] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1418.232429] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68285) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1418.233766] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e50f68a-d392-4b94-9adb-a8739d1f19aa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.241754] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73e1877-c0d7-409e-8002-122742c6983f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.256859] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d67ae7-b475-447e-8f0d-ad8e69d44365 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.263644] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b009de50-f65f-4c3f-833b-30e713dee819 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.293561] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179343MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=68285) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1418.293710] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1418.293916] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1418.400851] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892716, 'name': CreateVM_Task, 'duration_secs': 0.309589} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.401035] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1418.401835] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.402052] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1418.402372] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1418.402801] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8aadf066-edb1-4646-a8c6-517c000f6e59 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.407024] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1418.407024] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5259e19c-6e94-e38f-60fb-f51c0e814d8e" [ 1418.407024] env[68285]: _type = "Task" [ 1418.407024] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.414641] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5259e19c-6e94-e38f-60fb-f51c0e814d8e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.648907] env[68285]: INFO nova.compute.manager [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Rebuilding instance [ 1418.688237] env[68285]: DEBUG nova.compute.manager [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1418.689092] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df49e18-6830-472f-96a6-60be04f98d0c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.735467] env[68285]: DEBUG nova.compute.utils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1418.736760] env[68285]: DEBUG nova.compute.manager [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1418.736954] env[68285]: DEBUG nova.network.neutron [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1418.773715] env[68285]: DEBUG nova.policy [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41e116b3ac9d4c7386847a5559ea313c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43491d0bdffc49eaaad084f3124cffcb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1418.831154] env[68285]: DEBUG nova.network.neutron [req-c5d75406-9d63-492f-91e5-3a99bb9769bc req-fcfff3fc-9430-4081-8e83-62c8b5a75755 service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updated VIF entry in instance network info cache for port 379dbcb8-f7be-4c47-87de-5f6c87635d90. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1418.831609] env[68285]: DEBUG nova.network.neutron [req-c5d75406-9d63-492f-91e5-3a99bb9769bc req-fcfff3fc-9430-4081-8e83-62c8b5a75755 service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updating instance_info_cache with network_info: [{"id": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "address": "fa:16:3e:32:ee:cc", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap379dbcb8-f7", "ovs_interfaceid": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.918809] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5259e19c-6e94-e38f-60fb-f51c0e814d8e, 'name': SearchDatastore_Task, 'duration_secs': 0.012551} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.919153] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1418.919668] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1418.919668] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.919855] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1418.919913] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1418.920165] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20635dac-08c4-44d6-bf43-b094e0dbd27f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.928887] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1418.929074] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1418.929754] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-217333e9-0194-4cd4-8e29-ce157937ae44 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.934812] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1418.934812] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]520fded3-3300-7e84-b731-672c2d9f090f" [ 1418.934812] env[68285]: _type = "Task" [ 1418.934812] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.942412] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]520fded3-3300-7e84-b731-672c2d9f090f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.026655] env[68285]: DEBUG nova.network.neutron [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Successfully created port: 7c7b1a5b-57fa-4ae1-a454-3256e454042e {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1419.239867] env[68285]: DEBUG nova.compute.manager [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1419.319257] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance f9d35416-1f7f-4bf5-baba-1ce4e7436341 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1419.319423] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1419.319550] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance f8533809-ac64-4a1a-8fa8-45648110932d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1419.319669] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 005f8c9a-8327-4c60-a016-0460ca42f65f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1419.319784] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance a8a67f90-047d-49ce-8de0-ee3e17998c6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1419.319959] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1419.320108] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1419.333872] env[68285]: DEBUG oslo_concurrency.lockutils [req-c5d75406-9d63-492f-91e5-3a99bb9769bc req-fcfff3fc-9430-4081-8e83-62c8b5a75755 service nova] Releasing lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1419.386963] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccc4a53-106f-4a63-a9e9-91474b3fca4c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.395176] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa81525-8f64-42ba-9653-cdd5b90e656f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.425875] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80920e7-b59d-47ce-b494-7b8743870fa2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.433130] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8be7d0-220d-45d1-9a68-dca494f97a66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.449260] env[68285]: DEBUG nova.compute.provider_tree [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1419.453753] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': 
session[52410409-0226-2549-941e-c989b8ec60bd]520fded3-3300-7e84-b731-672c2d9f090f, 'name': SearchDatastore_Task, 'duration_secs': 0.008716} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.454783] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fe3d647-338a-469b-9cbb-4c65ed69dc77 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.459803] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1419.459803] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52cc6ae5-e38b-ae29-d6eb-4eb481723ec0" [ 1419.459803] env[68285]: _type = "Task" [ 1419.459803] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.469054] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52cc6ae5-e38b-ae29-d6eb-4eb481723ec0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.704683] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1419.704980] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d3d676d-1812-405f-89e2-d8248534a95d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.712565] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1419.712565] env[68285]: value = "task-2892717" [ 1419.712565] env[68285]: _type = "Task" [ 1419.712565] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.722262] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892717, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.955445] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1419.969668] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52cc6ae5-e38b-ae29-d6eb-4eb481723ec0, 'name': SearchDatastore_Task, 'duration_secs': 0.009591} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.969964] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1419.970282] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 005f8c9a-8327-4c60-a016-0460ca42f65f/005f8c9a-8327-4c60-a016-0460ca42f65f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1419.970579] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef445e7a-c8af-4a30-b916-4eeda4821e2a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.978280] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1419.978280] env[68285]: value = "task-2892718" [ 1419.978280] env[68285]: _type = "Task" [ 1419.978280] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.986846] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892718, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.224259] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892717, 'name': PowerOffVM_Task, 'duration_secs': 0.17796} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.224585] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1420.225281] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1420.226183] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8468902-3bf3-49fc-8dce-dee3e75d8030 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.234536] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1420.234829] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b093029-bea6-47b5-9b31-c39f8580a1c7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.250239] env[68285]: DEBUG nova.compute.manager [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1420.266610] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1420.266866] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1420.267075] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Deleting the datastore file [datastore1] f8533809-ac64-4a1a-8fa8-45648110932d {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1420.267574] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5a96435-1e12-4e16-a50c-c55d865d1442 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.276310] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1420.276310] env[68285]: value = "task-2892720" [ 1420.276310] env[68285]: _type = "Task" [ 1420.276310] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.278824] env[68285]: DEBUG nova.virt.hardware [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1420.279563] env[68285]: DEBUG nova.virt.hardware [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1420.279563] env[68285]: DEBUG nova.virt.hardware [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1420.279718] env[68285]: DEBUG nova.virt.hardware [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1420.279813] env[68285]: DEBUG nova.virt.hardware [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1420.279948] env[68285]: DEBUG nova.virt.hardware [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1420.280241] env[68285]: DEBUG nova.virt.hardware [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1420.280442] env[68285]: DEBUG nova.virt.hardware [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1420.280640] env[68285]: DEBUG nova.virt.hardware [None 
req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1420.280841] env[68285]: DEBUG nova.virt.hardware [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1420.281046] env[68285]: DEBUG nova.virt.hardware [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1420.282103] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed890a2-7d81-4a7c-bec2-d40bb60215b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.296749] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78898e0a-0191-41ac-b924-c18ca2e05a95 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.301253] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892720, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.414686] env[68285]: DEBUG nova.compute.manager [req-6e7db641-1bc5-4e7e-bc11-63a3e3549b41 req-8f88e584-549c-47a9-97ea-6679cd496e23 service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Received event network-vif-plugged-7c7b1a5b-57fa-4ae1-a454-3256e454042e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1420.414940] env[68285]: DEBUG oslo_concurrency.lockutils [req-6e7db641-1bc5-4e7e-bc11-63a3e3549b41 req-8f88e584-549c-47a9-97ea-6679cd496e23 service nova] Acquiring lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1420.415231] env[68285]: DEBUG oslo_concurrency.lockutils [req-6e7db641-1bc5-4e7e-bc11-63a3e3549b41 req-8f88e584-549c-47a9-97ea-6679cd496e23 service nova] Lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1420.415396] env[68285]: DEBUG oslo_concurrency.lockutils [req-6e7db641-1bc5-4e7e-bc11-63a3e3549b41 req-8f88e584-549c-47a9-97ea-6679cd496e23 service nova] Lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1420.415640] env[68285]: DEBUG nova.compute.manager [req-6e7db641-1bc5-4e7e-bc11-63a3e3549b41 
req-8f88e584-549c-47a9-97ea-6679cd496e23 service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] No waiting events found dispatching network-vif-plugged-7c7b1a5b-57fa-4ae1-a454-3256e454042e {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1420.415832] env[68285]: WARNING nova.compute.manager [req-6e7db641-1bc5-4e7e-bc11-63a3e3549b41 req-8f88e584-549c-47a9-97ea-6679cd496e23 service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Received unexpected event network-vif-plugged-7c7b1a5b-57fa-4ae1-a454-3256e454042e for instance with vm_state building and task_state spawning. [ 1420.461103] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1420.461278] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.167s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1420.487879] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892718, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49478} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.488177] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 005f8c9a-8327-4c60-a016-0460ca42f65f/005f8c9a-8327-4c60-a016-0460ca42f65f.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1420.488392] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1420.488644] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b5acbdb-bc75-4835-8fc5-4bfe82e979b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.495476] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1420.495476] env[68285]: value = "task-2892721" [ 1420.495476] env[68285]: _type = "Task" [ 1420.495476] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.502887] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892721, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.506308] env[68285]: DEBUG nova.network.neutron [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Successfully updated port: 7c7b1a5b-57fa-4ae1-a454-3256e454042e {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1420.790805] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892720, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182318} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.791162] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1420.791306] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1420.791431] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1421.005506] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892721, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066599} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.005771] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1421.006560] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2e50b2-3697-4463-963c-8aa76211231d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.009170] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.009300] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1421.009440] env[68285]: DEBUG nova.network.neutron [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1421.029315] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 005f8c9a-8327-4c60-a016-0460ca42f65f/005f8c9a-8327-4c60-a016-0460ca42f65f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1421.030133] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d18a757b-5b21-4cdd-aec4-35162b2bf050 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.049503] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1421.049503] env[68285]: value = "task-2892722" [ 1421.049503] env[68285]: _type = "Task" [ 1421.049503] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.057306] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892722, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.540862] env[68285]: DEBUG nova.network.neutron [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1421.560028] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892722, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.680706] env[68285]: DEBUG nova.network.neutron [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance_info_cache with network_info: [{"id": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "address": "fa:16:3e:56:de:67", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7b1a5b-57", "ovs_interfaceid": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.820983] env[68285]: DEBUG nova.virt.hardware [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1421.821265] env[68285]: DEBUG nova.virt.hardware [None 
req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1421.821416] env[68285]: DEBUG nova.virt.hardware [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1421.821595] env[68285]: DEBUG nova.virt.hardware [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1421.821743] env[68285]: DEBUG nova.virt.hardware [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1421.821889] env[68285]: DEBUG nova.virt.hardware [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1421.822107] env[68285]: DEBUG nova.virt.hardware [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1421.822268] env[68285]: DEBUG nova.virt.hardware [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1421.822434] env[68285]: DEBUG nova.virt.hardware [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1421.822597] env[68285]: DEBUG nova.virt.hardware [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1421.822768] env[68285]: DEBUG nova.virt.hardware [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1421.823666] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16139ab1-2489-4c50-806e-55448d2b1008 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.832302] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20159ae8-644f-42d5-bbe4-917dcbd475f2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.845323] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Instance VIF info [] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1421.850708] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1421.850932] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1421.851149] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a147ab0-41f3-4138-8acc-a77fb73b120c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.867543] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1421.867543] env[68285]: value = "task-2892723" [ 1421.867543] env[68285]: _type = "Task" [ 1421.867543] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.874596] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892723, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.062358] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892722, 'name': ReconfigVM_Task, 'duration_secs': 0.741972} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.062769] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 005f8c9a-8327-4c60-a016-0460ca42f65f/005f8c9a-8327-4c60-a016-0460ca42f65f.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1422.063640] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-985fb08d-043c-4f9b-81b6-71a0ef96da7e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.071831] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1422.071831] env[68285]: value = "task-2892724" [ 1422.071831] env[68285]: _type = "Task" [ 1422.071831] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.085672] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892724, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.183680] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1422.184025] env[68285]: DEBUG nova.compute.manager [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Instance network_info: |[{"id": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "address": "fa:16:3e:56:de:67", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7b1a5b-57", "ovs_interfaceid": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1422.184471] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:de:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c7b1a5b-57fa-4ae1-a454-3256e454042e', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1422.192260] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1422.192466] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1422.192696] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e8514e4-66d6-4319-9a02-a4b98418ac36 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.211714] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1422.211714] env[68285]: value = "task-2892725" [ 1422.211714] env[68285]: _type = "Task" [ 1422.211714] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.219891] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892725, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.377940] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892723, 'name': CreateVM_Task, 'duration_secs': 0.285121} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.378171] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1422.378550] env[68285]: DEBUG oslo_concurrency.lockutils [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.378710] env[68285]: DEBUG oslo_concurrency.lockutils [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1422.379065] env[68285]: DEBUG oslo_concurrency.lockutils [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1422.379321] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8da7d49-cf21-4160-9bcb-81eee25ffd60 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.383694] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1422.383694] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5207b446-d7d4-609d-3bb7-070b6175111d" [ 1422.383694] env[68285]: _type = "Task" [ 1422.383694] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.391359] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5207b446-d7d4-609d-3bb7-070b6175111d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.441982] env[68285]: DEBUG nova.compute.manager [req-a3997486-6ecb-4f2e-9f7b-b105a9bd3044 req-2485a0de-9ba4-4801-9e36-dc9384de499c service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Received event network-changed-7c7b1a5b-57fa-4ae1-a454-3256e454042e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1422.442200] env[68285]: DEBUG nova.compute.manager [req-a3997486-6ecb-4f2e-9f7b-b105a9bd3044 req-2485a0de-9ba4-4801-9e36-dc9384de499c service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Refreshing instance network info cache due to event network-changed-7c7b1a5b-57fa-4ae1-a454-3256e454042e. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1422.442411] env[68285]: DEBUG oslo_concurrency.lockutils [req-a3997486-6ecb-4f2e-9f7b-b105a9bd3044 req-2485a0de-9ba4-4801-9e36-dc9384de499c service nova] Acquiring lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.442640] env[68285]: DEBUG oslo_concurrency.lockutils [req-a3997486-6ecb-4f2e-9f7b-b105a9bd3044 req-2485a0de-9ba4-4801-9e36-dc9384de499c service nova] Acquired lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1422.442698] env[68285]: DEBUG nova.network.neutron [req-a3997486-6ecb-4f2e-9f7b-b105a9bd3044 req-2485a0de-9ba4-4801-9e36-dc9384de499c service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Refreshing network info cache for port 7c7b1a5b-57fa-4ae1-a454-3256e454042e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1422.586500] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892724, 'name': Rename_Task, 'duration_secs': 0.152739} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.586908] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1422.587273] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22b54b83-174b-4252-86d4-75c568e4eac7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.595297] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1422.595297] env[68285]: value = "task-2892726" [ 1422.595297] env[68285]: _type = "Task" [ 1422.595297] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.606578] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892726, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.722689] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892725, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.894890] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5207b446-d7d4-609d-3bb7-070b6175111d, 'name': SearchDatastore_Task, 'duration_secs': 0.010671} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.895391] env[68285]: DEBUG oslo_concurrency.lockutils [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1422.895527] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1422.895760] env[68285]: DEBUG oslo_concurrency.lockutils [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.895941] env[68285]: DEBUG oslo_concurrency.lockutils [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1422.896238] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1422.896445] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e43fc93-4c2a-4d95-96d5-6c8a90370d21 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.905185] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1422.905368] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1422.906239] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c3e704a-b441-4f2f-955e-fd5df62dabfc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.912026] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1422.912026] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5257e6b0-3c98-e41c-0faa-633917188a32" [ 1422.912026] env[68285]: _type = "Task" [ 1422.912026] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.919986] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5257e6b0-3c98-e41c-0faa-633917188a32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.105438] env[68285]: DEBUG oslo_vmware.api [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892726, 'name': PowerOnVM_Task, 'duration_secs': 0.45004} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.105705] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1423.105912] env[68285]: INFO nova.compute.manager [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Took 7.27 seconds to spawn the instance on the hypervisor. [ 1423.106127] env[68285]: DEBUG nova.compute.manager [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1423.106922] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4390d266-5840-4462-af87-bef5c3523d19 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.148509] env[68285]: DEBUG nova.network.neutron [req-a3997486-6ecb-4f2e-9f7b-b105a9bd3044 req-2485a0de-9ba4-4801-9e36-dc9384de499c service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updated VIF entry in instance network info cache for port 7c7b1a5b-57fa-4ae1-a454-3256e454042e. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1423.148887] env[68285]: DEBUG nova.network.neutron [req-a3997486-6ecb-4f2e-9f7b-b105a9bd3044 req-2485a0de-9ba4-4801-9e36-dc9384de499c service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance_info_cache with network_info: [{"id": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "address": "fa:16:3e:56:de:67", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7b1a5b-57", "ovs_interfaceid": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.223590] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892725, 'name': CreateVM_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.424749] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5257e6b0-3c98-e41c-0faa-633917188a32, 'name': SearchDatastore_Task, 'duration_secs': 0.009576} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.425556] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c60933ea-c698-4012-9faa-85c804ebd2e9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.431069] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1423.431069] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5215b09d-72ce-885a-a306-b06c70c1d6ae" [ 1423.431069] env[68285]: _type = "Task" [ 1423.431069] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.439353] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5215b09d-72ce-885a-a306-b06c70c1d6ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.626491] env[68285]: INFO nova.compute.manager [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Took 12.02 seconds to build instance. [ 1423.651446] env[68285]: DEBUG oslo_concurrency.lockutils [req-a3997486-6ecb-4f2e-9f7b-b105a9bd3044 req-2485a0de-9ba4-4801-9e36-dc9384de499c service nova] Releasing lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1423.724463] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892725, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.941689] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5215b09d-72ce-885a-a306-b06c70c1d6ae, 'name': SearchDatastore_Task, 'duration_secs': 0.013823} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.942069] env[68285]: DEBUG oslo_concurrency.lockutils [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1423.942313] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] f8533809-ac64-4a1a-8fa8-45648110932d/f8533809-ac64-4a1a-8fa8-45648110932d.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1423.942576] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef5dda96-027e-4bf1-b31b-da5779fa3a91 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.949222] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1423.949222] env[68285]: value = "task-2892727" [ 1423.949222] env[68285]: _type = "Task" [ 1423.949222] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.957248] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892727, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.128542] env[68285]: DEBUG oslo_concurrency.lockutils [None req-4f1c0319-2d6d-4b5f-b960-4d63c9642591 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "005f8c9a-8327-4c60-a016-0460ca42f65f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.528s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1424.225081] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892725, 'name': CreateVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.459940] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892727, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.548632] env[68285]: DEBUG nova.compute.manager [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Received event network-changed-6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1424.548789] env[68285]: DEBUG nova.compute.manager [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Refreshing instance network info cache due to event network-changed-6228c66e-e1b1-4b17-bdb2-ae945380a77a. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1424.548874] env[68285]: DEBUG oslo_concurrency.lockutils [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] Acquiring lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.549059] env[68285]: DEBUG oslo_concurrency.lockutils [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] Acquired lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1424.549766] env[68285]: DEBUG nova.network.neutron [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Refreshing network info cache for port 6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1424.725713] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892725, 'name': CreateVM_Task, 'duration_secs': 2.227748} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.726573] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1424.726771] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.726944] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1424.728037] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1424.728037] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf3b11b7-ab19-4efb-9404-51dbeced03fe {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.731606] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1424.731606] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5286ecca-b1ec-1264-b9f2-e83f8fcfb4d6" [ 1424.731606] env[68285]: _type = "Task" [ 1424.731606] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.738967] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5286ecca-b1ec-1264-b9f2-e83f8fcfb4d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.960893] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892727, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.720983} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.961247] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] f8533809-ac64-4a1a-8fa8-45648110932d/f8533809-ac64-4a1a-8fa8-45648110932d.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1424.961396] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1424.961648] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e91eb7a9-e0de-4005-8ae5-b9ff5a8833ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.968343] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1424.968343] env[68285]: value = "task-2892728" [ 1424.968343] env[68285]: _type = "Task" [ 1424.968343] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.976721] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892728, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.191511] env[68285]: DEBUG nova.compute.manager [req-223c8858-3d35-4a4b-800b-365dd8a71d9f req-cf32c0e0-a2a0-4f46-afb6-e08935e0988b service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Received event network-changed-379dbcb8-f7be-4c47-87de-5f6c87635d90 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1425.191712] env[68285]: DEBUG nova.compute.manager [req-223c8858-3d35-4a4b-800b-365dd8a71d9f req-cf32c0e0-a2a0-4f46-afb6-e08935e0988b service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Refreshing instance network info cache due to event network-changed-379dbcb8-f7be-4c47-87de-5f6c87635d90. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1425.191916] env[68285]: DEBUG oslo_concurrency.lockutils [req-223c8858-3d35-4a4b-800b-365dd8a71d9f req-cf32c0e0-a2a0-4f46-afb6-e08935e0988b service nova] Acquiring lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1425.192145] env[68285]: DEBUG oslo_concurrency.lockutils [req-223c8858-3d35-4a4b-800b-365dd8a71d9f req-cf32c0e0-a2a0-4f46-afb6-e08935e0988b service nova] Acquired lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1425.192323] env[68285]: DEBUG nova.network.neutron [req-223c8858-3d35-4a4b-800b-365dd8a71d9f req-cf32c0e0-a2a0-4f46-afb6-e08935e0988b service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Refreshing network info cache for port 379dbcb8-f7be-4c47-87de-5f6c87635d90 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1425.243035] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5286ecca-b1ec-1264-b9f2-e83f8fcfb4d6, 'name': SearchDatastore_Task, 'duration_secs': 0.009504} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.243328] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1425.243699] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1425.243849] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1425.243995] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1425.244189] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1425.244513] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c31d395-5630-4934-aec1-44d71954c7e7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.252456] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1425.252637] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1425.253326] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fb4e920-573f-4080-bb68-027e9b0fd886 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.256196] env[68285]: DEBUG nova.network.neutron [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updated VIF entry in instance network info cache for port 6228c66e-e1b1-4b17-bdb2-ae945380a77a. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1425.256510] env[68285]: DEBUG nova.network.neutron [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updating instance_info_cache with network_info: [{"id": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "address": "fa:16:3e:6d:6f:ad", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6228c66e-e1", "ovs_interfaceid": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.260334] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1425.260334] env[68285]: value = 
"session[52410409-0226-2549-941e-c989b8ec60bd]5235110b-d1c5-0c87-89df-5b1ec1798706" [ 1425.260334] env[68285]: _type = "Task" [ 1425.260334] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.267804] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5235110b-d1c5-0c87-89df-5b1ec1798706, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.477499] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892728, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07652} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.477712] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1425.478496] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2d159a-951a-4adb-b432-c67a2ae5a53e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.497681] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] f8533809-ac64-4a1a-8fa8-45648110932d/f8533809-ac64-4a1a-8fa8-45648110932d.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1425.497911] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9a9af4b-a778-4a25-acca-f317bdef7aee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.516382] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1425.516382] env[68285]: value = "task-2892729" [ 1425.516382] env[68285]: _type = "Task" [ 1425.516382] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.523860] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892729, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.758771] env[68285]: DEBUG oslo_concurrency.lockutils [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] Releasing lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1425.759089] env[68285]: DEBUG nova.compute.manager [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Received event network-changed-379dbcb8-f7be-4c47-87de-5f6c87635d90 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1425.759305] env[68285]: DEBUG nova.compute.manager [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Refreshing instance network info cache due to event network-changed-379dbcb8-f7be-4c47-87de-5f6c87635d90. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1425.759543] env[68285]: DEBUG oslo_concurrency.lockutils [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] Acquiring lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1425.770487] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5235110b-d1c5-0c87-89df-5b1ec1798706, 'name': SearchDatastore_Task, 'duration_secs': 0.008343} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.773613] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-002f2f3a-40bc-4f7e-a4f6-71c298550fc4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.778629] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1425.778629] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52ecb4f3-3094-789e-c033-2a1615f4e69a" [ 1425.778629] env[68285]: _type = "Task" [ 1425.778629] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.786308] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ecb4f3-3094-789e-c033-2a1615f4e69a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.896188] env[68285]: DEBUG nova.network.neutron [req-223c8858-3d35-4a4b-800b-365dd8a71d9f req-cf32c0e0-a2a0-4f46-afb6-e08935e0988b service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updated VIF entry in instance network info cache for port 379dbcb8-f7be-4c47-87de-5f6c87635d90. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1425.896615] env[68285]: DEBUG nova.network.neutron [req-223c8858-3d35-4a4b-800b-365dd8a71d9f req-cf32c0e0-a2a0-4f46-afb6-e08935e0988b service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updating instance_info_cache with network_info: [{"id": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "address": "fa:16:3e:32:ee:cc", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap379dbcb8-f7", "ovs_interfaceid": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.026438] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892729, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.289481] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52ecb4f3-3094-789e-c033-2a1615f4e69a, 'name': SearchDatastore_Task, 'duration_secs': 0.01411} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.289637] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1426.289800] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] a8a67f90-047d-49ce-8de0-ee3e17998c6b/a8a67f90-047d-49ce-8de0-ee3e17998c6b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1426.290120] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91400676-f83e-4963-a6ec-f4827d847cee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.297078] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1426.297078] env[68285]: value = "task-2892730" [ 1426.297078] env[68285]: _type = "Task" [ 1426.297078] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.304442] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892730, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.399557] env[68285]: DEBUG oslo_concurrency.lockutils [req-223c8858-3d35-4a4b-800b-365dd8a71d9f req-cf32c0e0-a2a0-4f46-afb6-e08935e0988b service nova] Releasing lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1426.399995] env[68285]: DEBUG oslo_concurrency.lockutils [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] Acquired lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1426.400219] env[68285]: DEBUG nova.network.neutron [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Refreshing network info cache for port 379dbcb8-f7be-4c47-87de-5f6c87635d90 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1426.528148] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892729, 'name': ReconfigVM_Task, 'duration_secs': 0.515761} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.528508] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Reconfigured VM instance instance-0000007b to attach disk [datastore1] f8533809-ac64-4a1a-8fa8-45648110932d/f8533809-ac64-4a1a-8fa8-45648110932d.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1426.529098] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9369841f-c6e9-459f-b1cb-ebbc90ed543d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.535764] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1426.535764] env[68285]: value = "task-2892731" [ 1426.535764] env[68285]: _type = "Task" [ 1426.535764] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.547450] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892731, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.806632] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892730, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461039} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.806917] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] a8a67f90-047d-49ce-8de0-ee3e17998c6b/a8a67f90-047d-49ce-8de0-ee3e17998c6b.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1426.807144] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1426.807458] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb2711c8-52f8-4aef-a7ea-7edf271c56f3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.813813] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1426.813813] env[68285]: value = "task-2892732" [ 1426.813813] env[68285]: _type = "Task" [ 1426.813813] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.820739] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892732, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.045013] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892731, 'name': Rename_Task, 'duration_secs': 0.143706} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.045317] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1427.045531] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41a9fed4-0fa2-462e-aed8-7725f001ecce {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.050759] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1427.050759] env[68285]: value = "task-2892733" [ 1427.050759] env[68285]: _type = "Task" [ 1427.050759] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.059908] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892733, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.085599] env[68285]: DEBUG nova.network.neutron [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updated VIF entry in instance network info cache for port 379dbcb8-f7be-4c47-87de-5f6c87635d90. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1427.086117] env[68285]: DEBUG nova.network.neutron [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updating instance_info_cache with network_info: [{"id": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "address": "fa:16:3e:32:ee:cc", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap379dbcb8-f7", "ovs_interfaceid": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.217216] env[68285]: DEBUG nova.compute.manager [req-713cc707-1c81-4a8a-a9d0-09ea94a08055 req-96007c51-eac5-4483-8920-cd88866099f7 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Received event network-changed-6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1427.217399] env[68285]: DEBUG nova.compute.manager [req-713cc707-1c81-4a8a-a9d0-09ea94a08055 req-96007c51-eac5-4483-8920-cd88866099f7 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Refreshing instance network info cache due to event network-changed-6228c66e-e1b1-4b17-bdb2-ae945380a77a. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1427.217613] env[68285]: DEBUG oslo_concurrency.lockutils [req-713cc707-1c81-4a8a-a9d0-09ea94a08055 req-96007c51-eac5-4483-8920-cd88866099f7 service nova] Acquiring lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1427.217756] env[68285]: DEBUG oslo_concurrency.lockutils [req-713cc707-1c81-4a8a-a9d0-09ea94a08055 req-96007c51-eac5-4483-8920-cd88866099f7 service nova] Acquired lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1427.218504] env[68285]: DEBUG nova.network.neutron [req-713cc707-1c81-4a8a-a9d0-09ea94a08055 req-96007c51-eac5-4483-8920-cd88866099f7 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Refreshing network info cache for port 6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1427.323288] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892732, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06882} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.323565] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1427.324272] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b74f118-5afb-42b7-afd0-8c9d2b6528f1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.346100] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] a8a67f90-047d-49ce-8de0-ee3e17998c6b/a8a67f90-047d-49ce-8de0-ee3e17998c6b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1427.346357] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b5b0a88-1277-4204-afed-493ed21cf937 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.366091] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1427.366091] env[68285]: value = "task-2892734" [ 1427.366091] env[68285]: _type = "Task" [ 1427.366091] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.373827] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892734, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.562547] env[68285]: DEBUG oslo_vmware.api [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892733, 'name': PowerOnVM_Task, 'duration_secs': 0.485644} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.562839] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1427.563066] env[68285]: DEBUG nova.compute.manager [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1427.563888] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a1eead-fec5-4f59-8519-5816620af661 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.588394] env[68285]: DEBUG oslo_concurrency.lockutils [req-1336ca68-1c33-4837-963e-67c5c7ca877a req-97e701fc-5779-4c0a-9e5c-35f42fe6fe0a service nova] Releasing lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1427.877363] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892734, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.964685] env[68285]: DEBUG nova.network.neutron [req-713cc707-1c81-4a8a-a9d0-09ea94a08055 req-96007c51-eac5-4483-8920-cd88866099f7 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updated VIF entry in instance network info cache for port 6228c66e-e1b1-4b17-bdb2-ae945380a77a. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1427.965085] env[68285]: DEBUG nova.network.neutron [req-713cc707-1c81-4a8a-a9d0-09ea94a08055 req-96007c51-eac5-4483-8920-cd88866099f7 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updating instance_info_cache with network_info: [{"id": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "address": "fa:16:3e:6d:6f:ad", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6228c66e-e1", "ovs_interfaceid": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1428.080691] env[68285]: DEBUG oslo_concurrency.lockutils [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1428.080979] env[68285]: DEBUG oslo_concurrency.lockutils [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1428.081161] env[68285]: DEBUG nova.objects.instance [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68285) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1428.376296] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892734, 'name': ReconfigVM_Task, 'duration_secs': 0.926719} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.376536] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Reconfigured VM instance instance-0000007d to attach disk [datastore2] a8a67f90-047d-49ce-8de0-ee3e17998c6b/a8a67f90-047d-49ce-8de0-ee3e17998c6b.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1428.377197] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-701245f4-0da4-42be-8579-8d9e800d862b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.382840] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1428.382840] env[68285]: value = "task-2892735" [ 1428.382840] env[68285]: _type = "Task" [ 1428.382840] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.391402] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892735, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.469420] env[68285]: DEBUG oslo_concurrency.lockutils [req-713cc707-1c81-4a8a-a9d0-09ea94a08055 req-96007c51-eac5-4483-8920-cd88866099f7 service nova] Releasing lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1428.510325] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquiring lock "f8533809-ac64-4a1a-8fa8-45648110932d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1428.510602] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Lock "f8533809-ac64-4a1a-8fa8-45648110932d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1428.510823] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquiring lock "f8533809-ac64-4a1a-8fa8-45648110932d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1428.511048] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 
tempest-ServersListShow298Test-1288957287-project-member] Lock "f8533809-ac64-4a1a-8fa8-45648110932d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1428.511248] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Lock "f8533809-ac64-4a1a-8fa8-45648110932d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1428.513568] env[68285]: INFO nova.compute.manager [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Terminating instance [ 1428.892833] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892735, 'name': Rename_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.017794] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquiring lock "refresh_cache-f8533809-ac64-4a1a-8fa8-45648110932d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.018014] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquired lock "refresh_cache-f8533809-ac64-4a1a-8fa8-45648110932d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1429.018224] env[68285]: DEBUG nova.network.neutron [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1429.089311] env[68285]: DEBUG oslo_concurrency.lockutils [None req-20f33b4a-41d7-4d33-901c-163d01e628c4 tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1429.393711] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892735, 'name': Rename_Task} progress is 99%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.536150] env[68285]: DEBUG nova.network.neutron [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1429.584831] env[68285]: DEBUG nova.network.neutron [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.894257] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892735, 'name': Rename_Task, 'duration_secs': 1.159715} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.894548] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1429.894772] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b880197-970b-45d6-8b20-91627a2b1b36 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.902555] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1429.902555] env[68285]: value = "task-2892736" [ 1429.902555] env[68285]: _type = "Task" [ 1429.902555] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.910158] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892736, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.087888] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Releasing lock "refresh_cache-f8533809-ac64-4a1a-8fa8-45648110932d" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1430.088340] env[68285]: DEBUG nova.compute.manager [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1430.088534] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1430.089422] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6bac80d-5cbb-4617-bdb8-0426fc9238e2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.096604] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1430.096931] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b07f9c88-8d5e-433d-83bd-767ac8b93b75 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.102457] env[68285]: DEBUG oslo_vmware.api [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1430.102457] env[68285]: value = "task-2892737" [ 1430.102457] env[68285]: _type = "Task" [ 1430.102457] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.111449] env[68285]: DEBUG oslo_vmware.api [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892737, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.412185] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892736, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.611606] env[68285]: DEBUG oslo_vmware.api [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892737, 'name': PowerOffVM_Task, 'duration_secs': 0.13519} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.611900] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1430.612089] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1430.612343] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-796e6f1e-5da5-4185-aa79-88784e58c638 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.636077] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1430.636305] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1430.636489] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Deleting the datastore file [datastore1] f8533809-ac64-4a1a-8fa8-45648110932d {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1430.636776] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19ad4c5f-d14b-4e2b-81d6-aad20ef7ebe5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.643088] env[68285]: DEBUG oslo_vmware.api [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for the task: (returnval){ [ 1430.643088] env[68285]: value = "task-2892739" [ 1430.643088] env[68285]: _type = "Task" [ 1430.643088] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.650983] env[68285]: DEBUG oslo_vmware.api [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892739, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.912942] env[68285]: DEBUG oslo_vmware.api [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892736, 'name': PowerOnVM_Task, 'duration_secs': 0.595} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.913241] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1430.913444] env[68285]: INFO nova.compute.manager [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Took 10.66 seconds to spawn the instance on the hypervisor. [ 1430.913668] env[68285]: DEBUG nova.compute.manager [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1430.914436] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966d7734-bfb0-4e33-bab1-132014604b6f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.152260] env[68285]: DEBUG oslo_vmware.api [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Task: {'id': task-2892739, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104723} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.152607] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1431.152685] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1431.152866] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1431.153054] env[68285]: INFO nova.compute.manager [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Took 1.06 seconds to destroy the instance on the hypervisor. 
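The ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task and DeleteDatastoreFile_Task records above all follow the same shape: a vSphere *_Task method is invoked through oslo_vmware.service, wait_for_task logs the returned task handle, and _poll_task reports progress until the task completes successfully with a duration_secs. A minimal sketch of that polling pattern is shown below; it illustrates the control flow only, and get_task_info / TaskInfo are hypothetical stand-ins, not the actual oslo.vmware or vSphere API.

```python
# Illustrative sketch of the task-polling pattern visible in the
# task-289273x entries above. `get_task_info` is a hypothetical callable
# that returns the current state of a task; it is not the real
# oslo.vmware / pyVmomi interface.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str            # "running", "success", or "error"
    progress: int = 0     # percent complete, as logged ("progress is 0%.", ...)
    error: str | None = None


def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
    """Poll a task until it finishes, mirroring the DEBUG lines in the trace."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        # Corresponds to the "_poll_task ... progress is N%." records.
        print(f"Task: {{'id': {task_id!r}}} progress is {info.progress}%.")
        if info.state == "success":
            # Corresponds to the "completed successfully" records.
            print(f"Task {task_id} completed successfully.")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
```

The same loop accounts for the later CreateSnapshot_Task and CloneVM_Task records in this trace; only the invoked method and the logged duration_secs differ.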
[ 1431.153299] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1431.153485] env[68285]: DEBUG nova.compute.manager [-] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1431.153601] env[68285]: DEBUG nova.network.neutron [-] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1431.172658] env[68285]: DEBUG nova.network.neutron [-] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1431.430793] env[68285]: INFO nova.compute.manager [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Took 15.38 seconds to build instance. [ 1431.675779] env[68285]: DEBUG nova.network.neutron [-] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.686921] env[68285]: DEBUG nova.compute.manager [req-2261cf0f-6b7a-4572-a82f-299fcb7c4d00 req-805fcf3c-af30-4571-aea3-cd2f9654e40c service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Received event network-changed-7c7b1a5b-57fa-4ae1-a454-3256e454042e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1431.687297] env[68285]: DEBUG nova.compute.manager [req-2261cf0f-6b7a-4572-a82f-299fcb7c4d00 req-805fcf3c-af30-4571-aea3-cd2f9654e40c service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Refreshing instance network info cache due to event network-changed-7c7b1a5b-57fa-4ae1-a454-3256e454042e. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1431.687606] env[68285]: DEBUG oslo_concurrency.lockutils [req-2261cf0f-6b7a-4572-a82f-299fcb7c4d00 req-805fcf3c-af30-4571-aea3-cd2f9654e40c service nova] Acquiring lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.687822] env[68285]: DEBUG oslo_concurrency.lockutils [req-2261cf0f-6b7a-4572-a82f-299fcb7c4d00 req-805fcf3c-af30-4571-aea3-cd2f9654e40c service nova] Acquired lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1431.688075] env[68285]: DEBUG nova.network.neutron [req-2261cf0f-6b7a-4572-a82f-299fcb7c4d00 req-805fcf3c-af30-4571-aea3-cd2f9654e40c service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Refreshing network info cache for port 7c7b1a5b-57fa-4ae1-a454-3256e454042e {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1431.932911] env[68285]: DEBUG oslo_concurrency.lockutils [None req-99b16689-ab72-4aec-bf89-3fed2ec107d0 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.889s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1432.179485] env[68285]: INFO nova.compute.manager [-] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Took 1.03 seconds to deallocate network for instance. [ 1432.434548] env[68285]: DEBUG nova.network.neutron [req-2261cf0f-6b7a-4572-a82f-299fcb7c4d00 req-805fcf3c-af30-4571-aea3-cd2f9654e40c service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updated VIF entry in instance network info cache for port 7c7b1a5b-57fa-4ae1-a454-3256e454042e. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1432.434947] env[68285]: DEBUG nova.network.neutron [req-2261cf0f-6b7a-4572-a82f-299fcb7c4d00 req-805fcf3c-af30-4571-aea3-cd2f9654e40c service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance_info_cache with network_info: [{"id": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "address": "fa:16:3e:56:de:67", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7b1a5b-57", "ovs_interfaceid": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.686556] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1432.686798] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1432.687033] env[68285]: DEBUG nova.objects.instance [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Lazy-loading 'resources' on Instance uuid f8533809-ac64-4a1a-8fa8-45648110932d {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1432.938103] env[68285]: DEBUG oslo_concurrency.lockutils [req-2261cf0f-6b7a-4572-a82f-299fcb7c4d00 req-805fcf3c-af30-4571-aea3-cd2f9654e40c service nova] Releasing lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1433.266509] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9467a338-470e-48b3-a479-d71c90fd6d8a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.274380] env[68285]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82eb53c2-5f8a-4fab-a83f-860881f67206 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.305708] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be1252e-282b-4481-9ac2-f3358cbf84ae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.312779] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08fb213e-58a6-49cf-ba0e-46d86a1adea1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.325759] env[68285]: DEBUG nova.compute.provider_tree [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1433.829510] env[68285]: DEBUG nova.scheduler.client.report [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1434.334978] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.648s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1434.357441] env[68285]: INFO nova.scheduler.client.report [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Deleted allocations for instance f8533809-ac64-4a1a-8fa8-45648110932d [ 1434.864587] env[68285]: DEBUG oslo_concurrency.lockutils [None req-e58c0cec-ee0b-4804-84bd-634b26f6915e tempest-ServersListShow298Test-1288957287 tempest-ServersListShow298Test-1288957287-project-member] Lock "f8533809-ac64-4a1a-8fa8-45648110932d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.354s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1440.426772] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1440.427288] env[68285]: DEBUG 
oslo_concurrency.lockutils [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1440.427288] env[68285]: INFO nova.compute.manager [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Shelving [ 1441.436883] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1441.437272] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e63ff24-53ed-4ef6-a1bc-cf19c6e97cb3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.444598] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1441.444598] env[68285]: value = "task-2892740" [ 1441.444598] env[68285]: _type = "Task" [ 1441.444598] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.452643] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892740, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.954806] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892740, 'name': PowerOffVM_Task, 'duration_secs': 0.16686} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.955088] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1441.955889] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea50987-fe63-4e0c-a1c6-50684faeac3d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.973891] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8535c3-08bc-4407-a329-47ad49208f59 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.483915] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1442.484341] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-009e74ae-c1a8-474a-a88c-6d2e7d11ccbd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.491380] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1442.491380] env[68285]: value = "task-2892741" [ 1442.491380] env[68285]: _type = "Task" [ 1442.491380] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.499463] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892741, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.001656] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892741, 'name': CreateSnapshot_Task, 'duration_secs': 0.461804} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.001941] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1443.002668] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6ac9e4-e9b9-4135-9cda-1e6bbc0e6fe7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.524230] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1443.524646] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0fb40b57-65bc-4e47-b5b8-58e40ffdc96f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.538188] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1443.538188] env[68285]: value = "task-2892742" [ 1443.538188] env[68285]: _type = "Task" [ 1443.538188] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.549180] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892742, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.050148] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892742, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.548283] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892742, 'name': CloneVM_Task, 'duration_secs': 0.976074} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.548755] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Created linked-clone VM from snapshot [ 1444.549478] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfea19ee-8685-4926-a137-2beeb5057609 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.557141] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Uploading image e4ec1404-d856-414d-bbe3-4d0fdba0b312 {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1444.577629] env[68285]: DEBUG oslo_vmware.rw_handles [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1444.577629] env[68285]: value = "vm-581112" [ 1444.577629] env[68285]: _type = "VirtualMachine" [ 1444.577629] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1444.577893] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ba241375-5340-476c-9953-a9f7e5aaa4cf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.584416] env[68285]: DEBUG oslo_vmware.rw_handles [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lease: (returnval){ [ 1444.584416] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521c3ce2-8348-2b37-e8e8-0676a78eb100" [ 1444.584416] env[68285]: _type = "HttpNfcLease" [ 1444.584416] env[68285]: } obtained for exporting VM: (result){ [ 1444.584416] env[68285]: value = "vm-581112" [ 1444.584416] env[68285]: _type = "VirtualMachine" [ 1444.584416] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1444.584907] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the lease: (returnval){ [ 1444.584907] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521c3ce2-8348-2b37-e8e8-0676a78eb100" [ 1444.584907] env[68285]: _type = "HttpNfcLease" [ 1444.584907] env[68285]: } to be ready. {{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1444.590492] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1444.590492] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521c3ce2-8348-2b37-e8e8-0676a78eb100" [ 1444.590492] env[68285]: _type = "HttpNfcLease" [ 1444.590492] env[68285]: } is initializing. 
{{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1445.008995] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "interface-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-b24692ad-b6df-4cc6-937d-61afea866aad" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1445.009664] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-b24692ad-b6df-4cc6-937d-61afea866aad" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1445.010046] env[68285]: DEBUG nova.objects.instance [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'flavor' on Instance uuid f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1445.093516] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1445.093516] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521c3ce2-8348-2b37-e8e8-0676a78eb100" [ 1445.093516] env[68285]: _type = "HttpNfcLease" [ 1445.093516] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1445.093827] env[68285]: DEBUG oslo_vmware.rw_handles [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1445.093827] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]521c3ce2-8348-2b37-e8e8-0676a78eb100" [ 1445.093827] env[68285]: _type = "HttpNfcLease" [ 1445.093827] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1445.094522] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9773c9-6998-4bcb-860d-8e6d1581eda5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.101502] env[68285]: DEBUG oslo_vmware.rw_handles [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522830c7-3ac9-bd4f-e97a-816eb4aa7c13/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1445.101674] env[68285]: DEBUG oslo_vmware.rw_handles [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522830c7-3ac9-bd4f-e97a-816eb4aa7c13/disk-0.vmdk for reading. 
{{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1445.190201] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9a4a371c-3c2b-4114-a2d7-7f7f8d357af3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.651380] env[68285]: DEBUG nova.objects.instance [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'pci_requests' on Instance uuid f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1446.154606] env[68285]: DEBUG nova.objects.base [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1446.154801] env[68285]: DEBUG nova.network.neutron [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1446.220063] env[68285]: DEBUG nova.policy [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '494447cb560a41dd9a3118745ac60554', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75a6837bced940cdaf5743b8e94cce29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1447.599481] env[68285]: DEBUG nova.compute.manager [req-8bebef3f-2486-42e1-a9c6-5307e59f9602 req-a96367ce-7ff0-4468-a9a1-b261dc6c128e service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Received event network-vif-plugged-b24692ad-b6df-4cc6-937d-61afea866aad {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1447.599739] env[68285]: DEBUG oslo_concurrency.lockutils [req-8bebef3f-2486-42e1-a9c6-5307e59f9602 req-a96367ce-7ff0-4468-a9a1-b261dc6c128e service nova] Acquiring lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1447.599898] env[68285]: DEBUG oslo_concurrency.lockutils [req-8bebef3f-2486-42e1-a9c6-5307e59f9602 req-a96367ce-7ff0-4468-a9a1-b261dc6c128e service nova] Lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1447.600107] env[68285]: DEBUG oslo_concurrency.lockutils [req-8bebef3f-2486-42e1-a9c6-5307e59f9602 req-a96367ce-7ff0-4468-a9a1-b261dc6c128e service nova] Lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1447.600261] env[68285]: DEBUG nova.compute.manager [req-8bebef3f-2486-42e1-a9c6-5307e59f9602 req-a96367ce-7ff0-4468-a9a1-b261dc6c128e service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] No waiting events found dispatching network-vif-plugged-b24692ad-b6df-4cc6-937d-61afea866aad {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1447.600397] env[68285]: WARNING nova.compute.manager [req-8bebef3f-2486-42e1-a9c6-5307e59f9602 req-a96367ce-7ff0-4468-a9a1-b261dc6c128e service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Received unexpected event network-vif-plugged-b24692ad-b6df-4cc6-937d-61afea866aad for instance with vm_state active and task_state None. [ 1447.688847] env[68285]: DEBUG nova.network.neutron [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Successfully updated port: b24692ad-b6df-4cc6-937d-61afea866aad {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1448.194388] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.194483] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1448.194662] env[68285]: DEBUG nova.network.neutron [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1448.729658] env[68285]: WARNING nova.network.neutron [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d already exists in list: networks containing: ['c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d']. 
ignoring it [ 1449.048752] env[68285]: DEBUG nova.network.neutron [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updating instance_info_cache with network_info: [{"id": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "address": "fa:16:3e:6d:6f:ad", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6228c66e-e1", "ovs_interfaceid": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b24692ad-b6df-4cc6-937d-61afea866aad", "address": "fa:16:3e:87:31:e1", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb24692ad-b6", "ovs_interfaceid": "b24692ad-b6df-4cc6-937d-61afea866aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.552159] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1449.552745] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.552900] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1449.553808] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90758b1d-2077-476c-8f14-3b592c45fb93 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.573481] env[68285]: DEBUG nova.virt.hardware [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1449.573776] env[68285]: DEBUG nova.virt.hardware [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1449.573975] env[68285]: DEBUG nova.virt.hardware [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1449.574220] env[68285]: DEBUG nova.virt.hardware [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1449.574400] env[68285]: DEBUG nova.virt.hardware [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1449.574557] env[68285]: DEBUG nova.virt.hardware [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1449.574763] env[68285]: DEBUG nova.virt.hardware [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1449.574924] env[68285]: DEBUG nova.virt.hardware [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1449.575135] env[68285]: DEBUG nova.virt.hardware [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1449.575316] env[68285]: DEBUG nova.virt.hardware [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1449.575557] env[68285]: DEBUG nova.virt.hardware [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1449.582336] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Reconfiguring VM to attach interface {{(pid=68285) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1449.583111] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a58fd9b-9f5f-4a55-8be2-4bcb48e59bcf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.601560] env[68285]: DEBUG oslo_vmware.api [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1449.601560] env[68285]: value = "task-2892744" [ 1449.601560] env[68285]: _type = "Task" [ 1449.601560] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.611195] env[68285]: DEBUG oslo_vmware.api [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892744, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.626461] env[68285]: DEBUG nova.compute.manager [req-6ea80e23-8e63-4744-8348-1e1cdd094581 req-92c074df-d2eb-46a6-b94c-2b21c932c752 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Received event network-changed-b24692ad-b6df-4cc6-937d-61afea866aad {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1449.626789] env[68285]: DEBUG nova.compute.manager [req-6ea80e23-8e63-4744-8348-1e1cdd094581 req-92c074df-d2eb-46a6-b94c-2b21c932c752 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Refreshing instance network info cache due to event network-changed-b24692ad-b6df-4cc6-937d-61afea866aad. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1449.627118] env[68285]: DEBUG oslo_concurrency.lockutils [req-6ea80e23-8e63-4744-8348-1e1cdd094581 req-92c074df-d2eb-46a6-b94c-2b21c932c752 service nova] Acquiring lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.627368] env[68285]: DEBUG oslo_concurrency.lockutils [req-6ea80e23-8e63-4744-8348-1e1cdd094581 req-92c074df-d2eb-46a6-b94c-2b21c932c752 service nova] Acquired lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1449.627621] env[68285]: DEBUG nova.network.neutron [req-6ea80e23-8e63-4744-8348-1e1cdd094581 req-92c074df-d2eb-46a6-b94c-2b21c932c752 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Refreshing network info cache for port b24692ad-b6df-4cc6-937d-61afea866aad {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1450.111789] env[68285]: DEBUG oslo_vmware.api [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892744, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.348111] env[68285]: DEBUG nova.network.neutron [req-6ea80e23-8e63-4744-8348-1e1cdd094581 req-92c074df-d2eb-46a6-b94c-2b21c932c752 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updated VIF entry in instance network info cache for port b24692ad-b6df-4cc6-937d-61afea866aad. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1450.348564] env[68285]: DEBUG nova.network.neutron [req-6ea80e23-8e63-4744-8348-1e1cdd094581 req-92c074df-d2eb-46a6-b94c-2b21c932c752 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updating instance_info_cache with network_info: [{"id": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "address": "fa:16:3e:6d:6f:ad", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6228c66e-e1", "ovs_interfaceid": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b24692ad-b6df-4cc6-937d-61afea866aad", "address": "fa:16:3e:87:31:e1", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb24692ad-b6", "ovs_interfaceid": "b24692ad-b6df-4cc6-937d-61afea866aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.612560] env[68285]: DEBUG oslo_vmware.api [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892744, 'name': ReconfigVM_Task, 'duration_secs': 0.603455} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.613115] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1450.613408] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Reconfigured VM to attach interface {{(pid=68285) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1450.851891] env[68285]: DEBUG oslo_concurrency.lockutils [req-6ea80e23-8e63-4744-8348-1e1cdd094581 req-92c074df-d2eb-46a6-b94c-2b21c932c752 service nova] Releasing lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1451.118673] env[68285]: DEBUG oslo_concurrency.lockutils [None req-44989f74-0566-463b-95f7-51da7fe30c03 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-b24692ad-b6df-4cc6-937d-61afea866aad" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.109s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1452.701152] env[68285]: DEBUG oslo_concurrency.lockutils [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "interface-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-b24692ad-b6df-4cc6-937d-61afea866aad" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1452.701575] env[68285]: DEBUG oslo_concurrency.lockutils [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-b24692ad-b6df-4cc6-937d-61afea866aad" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1453.204481] env[68285]: DEBUG oslo_concurrency.lockutils [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.204692] env[68285]: DEBUG oslo_concurrency.lockutils [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1453.205644] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e677f525-5b92-4c85-b077-10d836056125 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.223895] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b9c591-6e3b-4690-afbc-bd710d49089f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.249899] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Reconfiguring VM to detach interface {{(pid=68285) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1453.250175] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7d33eb1-3b68-487c-ab18-12d75332b12f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.269823] env[68285]: DEBUG oslo_vmware.api [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1453.269823] env[68285]: value = "task-2892745" [ 1453.269823] env[68285]: _type = "Task" [ 1453.269823] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.277341] env[68285]: DEBUG oslo_vmware.api [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892745, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.330541] env[68285]: DEBUG oslo_vmware.rw_handles [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522830c7-3ac9-bd4f-e97a-816eb4aa7c13/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1453.331482] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca05d7fb-5e68-4b5c-b9c1-a3677fb5603d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.338218] env[68285]: DEBUG oslo_vmware.rw_handles [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522830c7-3ac9-bd4f-e97a-816eb4aa7c13/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1453.338218] env[68285]: ERROR oslo_vmware.rw_handles [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522830c7-3ac9-bd4f-e97a-816eb4aa7c13/disk-0.vmdk due to incomplete transfer. 
[ 1453.338451] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-dd355053-d446-4419-888d-2a37756abae5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.345358] env[68285]: DEBUG oslo_vmware.rw_handles [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522830c7-3ac9-bd4f-e97a-816eb4aa7c13/disk-0.vmdk. {{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1453.345548] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Uploaded image e4ec1404-d856-414d-bbe3-4d0fdba0b312 to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1453.348066] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1453.348301] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d1ff6ccd-3960-44f8-810f-222f90d654eb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.353123] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1453.353123] env[68285]: value = "task-2892746" [ 1453.353123] env[68285]: _type = "Task" [ 1453.353123] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.361287] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892746, 'name': Destroy_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.779670] env[68285]: DEBUG oslo_vmware.api [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892745, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.862461] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892746, 'name': Destroy_Task, 'duration_secs': 0.384695} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.862716] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Destroyed the VM [ 1453.862929] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1453.863195] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4bc567a5-2008-456f-a8e9-587999ae7a84 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.869633] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1453.869633] env[68285]: value = "task-2892747" [ 1453.869633] env[68285]: _type = "Task" [ 1453.869633] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.876954] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892747, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.280540] env[68285]: DEBUG oslo_vmware.api [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892745, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.379250] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892747, 'name': RemoveSnapshot_Task, 'duration_secs': 0.371038} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.379529] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1454.379800] env[68285]: DEBUG nova.compute.manager [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1454.380557] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e502f4fe-9f7d-4478-919e-8c45be31e542 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.783021] env[68285]: DEBUG oslo_vmware.api [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892745, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.892488] env[68285]: INFO nova.compute.manager [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Shelve offloading [ 1455.282178] env[68285]: DEBUG oslo_vmware.api [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892745, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.396137] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1455.396498] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2108c4ba-5114-4117-bdc0-f0b4f4fb6faa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.403277] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1455.403277] env[68285]: value = "task-2892748" [ 1455.403277] env[68285]: _type = "Task" [ 1455.403277] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.412774] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1455.412971] env[68285]: DEBUG nova.compute.manager [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1455.413687] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c21cbd-48b9-4fd3-8cbd-e3d238c14386 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.418965] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.419145] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1455.419316] env[68285]: DEBUG nova.network.neutron [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1455.783223] env[68285]: DEBUG oslo_vmware.api [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892745, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.130542] env[68285]: DEBUG nova.network.neutron [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updating instance_info_cache with network_info: [{"id": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "address": "fa:16:3e:6b:0f:c0", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cd7aff5-25", "ovs_interfaceid": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1456.283829] env[68285]: DEBUG oslo_vmware.api [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892745, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.633061] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1456.784539] env[68285]: DEBUG oslo_vmware.api [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892745, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.897053] env[68285]: DEBUG nova.compute.manager [req-9f14c390-fdca-4ac4-9ec6-16b7f1c0ecb1 req-09a15a62-87c6-4c49-b28c-332c3c6b7860 service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Received event network-vif-unplugged-4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1456.897348] env[68285]: DEBUG oslo_concurrency.lockutils [req-9f14c390-fdca-4ac4-9ec6-16b7f1c0ecb1 req-09a15a62-87c6-4c49-b28c-332c3c6b7860 service nova] Acquiring lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1456.897471] env[68285]: DEBUG oslo_concurrency.lockutils [req-9f14c390-fdca-4ac4-9ec6-16b7f1c0ecb1 req-09a15a62-87c6-4c49-b28c-332c3c6b7860 service nova] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1456.897676] env[68285]: DEBUG oslo_concurrency.lockutils [req-9f14c390-fdca-4ac4-9ec6-16b7f1c0ecb1 req-09a15a62-87c6-4c49-b28c-332c3c6b7860 service nova] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1456.897798] env[68285]: DEBUG nova.compute.manager [req-9f14c390-fdca-4ac4-9ec6-16b7f1c0ecb1 req-09a15a62-87c6-4c49-b28c-332c3c6b7860 service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] No waiting events found dispatching network-vif-unplugged-4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1456.897972] env[68285]: WARNING nova.compute.manager [req-9f14c390-fdca-4ac4-9ec6-16b7f1c0ecb1 req-09a15a62-87c6-4c49-b28c-332c3c6b7860 service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Received unexpected event network-vif-unplugged-4cd7aff5-25ff-4491-b7b0-a079248d54f4 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1457.219575] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1457.220499] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84812da4-0cf9-4786-b6ba-46f8df8b96f4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.227847] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1457.228122] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0576164-30ea-447a-b425-b96ca62be7fa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.285057] env[68285]: DEBUG oslo_vmware.api [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892745, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.298764] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1457.299040] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1457.299305] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleting the datastore file [datastore2] f9d35416-1f7f-4bf5-baba-1ce4e7436341 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1457.299582] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ca168eb-e1b4-4581-b78a-939c225a4dbb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.306426] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1457.306426] env[68285]: value = "task-2892750" [ 1457.306426] env[68285]: _type = "Task" [ 1457.306426] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.313776] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892750, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.786617] env[68285]: DEBUG oslo_vmware.api [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892745, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.814864] env[68285]: DEBUG oslo_vmware.api [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892750, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13076} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.815136] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1457.815321] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1457.815496] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1457.842318] env[68285]: INFO nova.scheduler.client.report [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleted allocations for instance f9d35416-1f7f-4bf5-baba-1ce4e7436341 [ 1458.287943] env[68285]: DEBUG oslo_vmware.api [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892745, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.347198] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1458.347466] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1458.347679] env[68285]: DEBUG nova.objects.instance [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lazy-loading 'resources' on Instance uuid f9d35416-1f7f-4bf5-baba-1ce4e7436341 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1458.787622] env[68285]: DEBUG oslo_vmware.api [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892745, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.850494] env[68285]: DEBUG nova.objects.instance [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lazy-loading 'numa_topology' on Instance uuid f9d35416-1f7f-4bf5-baba-1ce4e7436341 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1458.921784] env[68285]: DEBUG nova.compute.manager [req-ebbdb27c-78c6-4faf-9d70-a057c5b80057 req-9b876898-bc1b-462d-aecc-aab84f32e353 service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Received event network-changed-4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1458.922056] env[68285]: DEBUG nova.compute.manager [req-ebbdb27c-78c6-4faf-9d70-a057c5b80057 req-9b876898-bc1b-462d-aecc-aab84f32e353 service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Refreshing instance network info cache due to event network-changed-4cd7aff5-25ff-4491-b7b0-a079248d54f4. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1458.922222] env[68285]: DEBUG oslo_concurrency.lockutils [req-ebbdb27c-78c6-4faf-9d70-a057c5b80057 req-9b876898-bc1b-462d-aecc-aab84f32e353 service nova] Acquiring lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.922314] env[68285]: DEBUG oslo_concurrency.lockutils [req-ebbdb27c-78c6-4faf-9d70-a057c5b80057 req-9b876898-bc1b-462d-aecc-aab84f32e353 service nova] Acquired lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1458.922496] env[68285]: DEBUG nova.network.neutron [req-ebbdb27c-78c6-4faf-9d70-a057c5b80057 req-9b876898-bc1b-462d-aecc-aab84f32e353 service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Refreshing network info cache for port 4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1459.288255] env[68285]: DEBUG oslo_vmware.api [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892745, 'name': ReconfigVM_Task, 'duration_secs': 5.744343} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.288446] env[68285]: DEBUG oslo_concurrency.lockutils [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1459.289023] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Reconfigured VM to detach interface {{(pid=68285) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1459.353252] env[68285]: DEBUG nova.objects.base [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1459.407361] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08707fae-e09b-46df-aa0c-895015f0c38b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.416805] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89785a4-4f30-4cb4-8098-c550fd55ee50 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.449669] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bceeea74-105c-40d8-a983-d39bc01a82f6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.458053] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a59b3372-737d-440e-abb2-009edd2e5b45 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.472721] env[68285]: DEBUG nova.compute.provider_tree [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1459.715262] env[68285]: DEBUG nova.network.neutron [req-ebbdb27c-78c6-4faf-9d70-a057c5b80057 req-9b876898-bc1b-462d-aecc-aab84f32e353 service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updated VIF entry in instance network info cache for port 4cd7aff5-25ff-4491-b7b0-a079248d54f4. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1459.715663] env[68285]: DEBUG nova.network.neutron [req-ebbdb27c-78c6-4faf-9d70-a057c5b80057 req-9b876898-bc1b-462d-aecc-aab84f32e353 service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updating instance_info_cache with network_info: [{"id": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "address": "fa:16:3e:6b:0f:c0", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap4cd7aff5-25", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.976407] env[68285]: DEBUG nova.scheduler.client.report [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1460.218328] env[68285]: DEBUG oslo_concurrency.lockutils [req-ebbdb27c-78c6-4faf-9d70-a057c5b80057 req-9b876898-bc1b-462d-aecc-aab84f32e353 service nova] Releasing lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1460.481658] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 
tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.134s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1460.543056] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1460.629617] env[68285]: DEBUG oslo_concurrency.lockutils [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.629832] env[68285]: DEBUG oslo_concurrency.lockutils [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1460.630031] env[68285]: DEBUG nova.network.neutron [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1460.990490] env[68285]: DEBUG oslo_concurrency.lockutils [None req-eccd14ad-a037-4045-8353-2142711c6d9f tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.563s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1460.991126] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.448s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1460.991251] env[68285]: INFO nova.compute.manager [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Unshelving [ 1461.287455] env[68285]: DEBUG nova.compute.manager [req-227dd3ea-088c-425f-a22a-9c25fa942a38 req-94c99cf6-b43a-47ee-aff5-028587730d41 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Received event network-changed-6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1461.287643] env[68285]: DEBUG nova.compute.manager [req-227dd3ea-088c-425f-a22a-9c25fa942a38 
req-94c99cf6-b43a-47ee-aff5-028587730d41 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Refreshing instance network info cache due to event network-changed-6228c66e-e1b1-4b17-bdb2-ae945380a77a. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1461.287834] env[68285]: DEBUG oslo_concurrency.lockutils [req-227dd3ea-088c-425f-a22a-9c25fa942a38 req-94c99cf6-b43a-47ee-aff5-028587730d41 service nova] Acquiring lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.328681] env[68285]: INFO nova.network.neutron [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Port b24692ad-b6df-4cc6-937d-61afea866aad from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1461.329049] env[68285]: DEBUG nova.network.neutron [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updating instance_info_cache with network_info: [{"id": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "address": "fa:16:3e:6d:6f:ad", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6228c66e-e1", "ovs_interfaceid": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.831445] env[68285]: DEBUG oslo_concurrency.lockutils [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1461.833730] env[68285]: DEBUG oslo_concurrency.lockutils [req-227dd3ea-088c-425f-a22a-9c25fa942a38 req-94c99cf6-b43a-47ee-aff5-028587730d41 service nova] Acquired lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1461.833961] env[68285]: DEBUG nova.network.neutron [req-227dd3ea-088c-425f-a22a-9c25fa942a38 req-94c99cf6-b43a-47ee-aff5-028587730d41 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Refreshing network info cache for 
port 6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1461.966393] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "interface-005f8c9a-8327-4c60-a016-0460ca42f65f-b24692ad-b6df-4cc6-937d-61afea866aad" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1461.966722] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-005f8c9a-8327-4c60-a016-0460ca42f65f-b24692ad-b6df-4cc6-937d-61afea866aad" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1461.967042] env[68285]: DEBUG nova.objects.instance [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'flavor' on Instance uuid 005f8c9a-8327-4c60-a016-0460ca42f65f {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1462.000194] env[68285]: DEBUG nova.compute.utils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1462.336750] env[68285]: DEBUG oslo_concurrency.lockutils [None req-affe55a8-5b99-415b-aa04-aadadc1fccfd tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-b24692ad-b6df-4cc6-937d-61afea866aad" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.635s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1462.503103] env[68285]: INFO nova.virt.block_device [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Booting with volume 158231db-55db-48b6-a04f-63dcb2cdac56 at /dev/sdb [ 1462.525177] env[68285]: DEBUG nova.network.neutron [req-227dd3ea-088c-425f-a22a-9c25fa942a38 req-94c99cf6-b43a-47ee-aff5-028587730d41 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updated VIF entry in instance network info cache for port 6228c66e-e1b1-4b17-bdb2-ae945380a77a. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1462.525177] env[68285]: DEBUG nova.network.neutron [req-227dd3ea-088c-425f-a22a-9c25fa942a38 req-94c99cf6-b43a-47ee-aff5-028587730d41 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updating instance_info_cache with network_info: [{"id": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "address": "fa:16:3e:6d:6f:ad", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6228c66e-e1", "ovs_interfaceid": "6228c66e-e1b1-4b17-bdb2-ae945380a77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.540501] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-03f8206f-5fa3-4b3d-a0f2-c974728f8999 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.550315] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f305d9-0540-487a-bb95-f61dd75a5752 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.561340] env[68285]: DEBUG nova.objects.instance [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'pci_requests' on Instance uuid 005f8c9a-8327-4c60-a016-0460ca42f65f {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1462.579895] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c46ca31-bfae-4c66-a9da-2a0aba27a740 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.587818] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056ba64d-bb38-48ea-aa41-b635c127d3a1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.612550] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523c4433-afa8-4c32-b724-ebe148d0873c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.618443] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8081c3-590a-464b-ad5a-537eb0840a91 {{(pid=68285) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.631864] env[68285]: DEBUG nova.virt.block_device [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updating existing volume attachment record: 3e273bb1-cf7d-4668-96c7-0e4f00f64959 {{(pid=68285) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1463.028128] env[68285]: DEBUG oslo_concurrency.lockutils [req-227dd3ea-088c-425f-a22a-9c25fa942a38 req-94c99cf6-b43a-47ee-aff5-028587730d41 service nova] Releasing lock "refresh_cache-f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1463.064240] env[68285]: DEBUG nova.objects.base [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Object Instance<005f8c9a-8327-4c60-a016-0460ca42f65f> lazy-loaded attributes: flavor,pci_requests {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1463.064483] env[68285]: DEBUG nova.network.neutron [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1463.128492] env[68285]: DEBUG nova.policy [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '494447cb560a41dd9a3118745ac60554', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75a6837bced940cdaf5743b8e94cce29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1463.312549] env[68285]: DEBUG nova.compute.manager [req-39977798-67d6-4236-beec-b70ed07dbe8b req-77b627bb-cc5c-4e85-8669-aaee933ffb3c service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Received event network-changed-379dbcb8-f7be-4c47-87de-5f6c87635d90 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1463.312870] env[68285]: DEBUG nova.compute.manager [req-39977798-67d6-4236-beec-b70ed07dbe8b req-77b627bb-cc5c-4e85-8669-aaee933ffb3c service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Refreshing instance network info cache due to event network-changed-379dbcb8-f7be-4c47-87de-5f6c87635d90. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1463.313103] env[68285]: DEBUG oslo_concurrency.lockutils [req-39977798-67d6-4236-beec-b70ed07dbe8b req-77b627bb-cc5c-4e85-8669-aaee933ffb3c service nova] Acquiring lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.313103] env[68285]: DEBUG oslo_concurrency.lockutils [req-39977798-67d6-4236-beec-b70ed07dbe8b req-77b627bb-cc5c-4e85-8669-aaee933ffb3c service nova] Acquired lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1463.313256] env[68285]: DEBUG nova.network.neutron [req-39977798-67d6-4236-beec-b70ed07dbe8b req-77b627bb-cc5c-4e85-8669-aaee933ffb3c service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Refreshing network info cache for port 379dbcb8-f7be-4c47-87de-5f6c87635d90 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1464.020387] env[68285]: DEBUG nova.network.neutron [req-39977798-67d6-4236-beec-b70ed07dbe8b req-77b627bb-cc5c-4e85-8669-aaee933ffb3c service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updated VIF entry in instance network info cache for port 379dbcb8-f7be-4c47-87de-5f6c87635d90. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1464.020750] env[68285]: DEBUG nova.network.neutron [req-39977798-67d6-4236-beec-b70ed07dbe8b req-77b627bb-cc5c-4e85-8669-aaee933ffb3c service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updating instance_info_cache with network_info: [{"id": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "address": "fa:16:3e:32:ee:cc", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap379dbcb8-f7", "ovs_interfaceid": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1464.523266] env[68285]: DEBUG oslo_concurrency.lockutils [req-39977798-67d6-4236-beec-b70ed07dbe8b req-77b627bb-cc5c-4e85-8669-aaee933ffb3c service nova] Releasing lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1464.552808] env[68285]: DEBUG nova.network.neutron [None req-7a059733-af95-4f98-94a6-1a17a103efed 
tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Successfully updated port: b24692ad-b6df-4cc6-937d-61afea866aad {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1465.055454] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.055613] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1465.055798] env[68285]: DEBUG nova.network.neutron [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1465.338412] env[68285]: DEBUG nova.compute.manager [req-b69b218e-5d25-4e54-8a6b-598df820d703 req-cfdb28ca-3799-4aa9-bfca-22ddaf22167f service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Received event network-vif-plugged-b24692ad-b6df-4cc6-937d-61afea866aad {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1465.338634] env[68285]: DEBUG oslo_concurrency.lockutils [req-b69b218e-5d25-4e54-8a6b-598df820d703 req-cfdb28ca-3799-4aa9-bfca-22ddaf22167f service nova] Acquiring lock "005f8c9a-8327-4c60-a016-0460ca42f65f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1465.338841] env[68285]: DEBUG oslo_concurrency.lockutils [req-b69b218e-5d25-4e54-8a6b-598df820d703 req-cfdb28ca-3799-4aa9-bfca-22ddaf22167f service nova] Lock "005f8c9a-8327-4c60-a016-0460ca42f65f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1465.339050] env[68285]: DEBUG oslo_concurrency.lockutils [req-b69b218e-5d25-4e54-8a6b-598df820d703 req-cfdb28ca-3799-4aa9-bfca-22ddaf22167f service nova] Lock "005f8c9a-8327-4c60-a016-0460ca42f65f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1465.339325] env[68285]: DEBUG nova.compute.manager [req-b69b218e-5d25-4e54-8a6b-598df820d703 req-cfdb28ca-3799-4aa9-bfca-22ddaf22167f service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] No waiting events found dispatching network-vif-plugged-b24692ad-b6df-4cc6-937d-61afea866aad {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1465.339515] env[68285]: WARNING nova.compute.manager [req-b69b218e-5d25-4e54-8a6b-598df820d703 req-cfdb28ca-3799-4aa9-bfca-22ddaf22167f service nova] [instance: 
005f8c9a-8327-4c60-a016-0460ca42f65f] Received unexpected event network-vif-plugged-b24692ad-b6df-4cc6-937d-61afea866aad for instance with vm_state active and task_state None. [ 1465.339710] env[68285]: DEBUG nova.compute.manager [req-b69b218e-5d25-4e54-8a6b-598df820d703 req-cfdb28ca-3799-4aa9-bfca-22ddaf22167f service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Received event network-changed-b24692ad-b6df-4cc6-937d-61afea866aad {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1465.339834] env[68285]: DEBUG nova.compute.manager [req-b69b218e-5d25-4e54-8a6b-598df820d703 req-cfdb28ca-3799-4aa9-bfca-22ddaf22167f service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Refreshing instance network info cache due to event network-changed-b24692ad-b6df-4cc6-937d-61afea866aad. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1465.340006] env[68285]: DEBUG oslo_concurrency.lockutils [req-b69b218e-5d25-4e54-8a6b-598df820d703 req-cfdb28ca-3799-4aa9-bfca-22ddaf22167f service nova] Acquiring lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.589722] env[68285]: WARNING nova.network.neutron [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d already exists in list: networks containing: ['c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d']. ignoring it [ 1465.834179] env[68285]: DEBUG nova.network.neutron [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updating instance_info_cache with network_info: [{"id": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "address": "fa:16:3e:32:ee:cc", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap379dbcb8-f7", "ovs_interfaceid": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b24692ad-b6df-4cc6-937d-61afea866aad", "address": "fa:16:3e:87:31:e1", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": 
{}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb24692ad-b6", "ovs_interfaceid": "b24692ad-b6df-4cc6-937d-61afea866aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.337755] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1466.338402] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1466.338561] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1466.338854] env[68285]: DEBUG oslo_concurrency.lockutils [req-b69b218e-5d25-4e54-8a6b-598df820d703 req-cfdb28ca-3799-4aa9-bfca-22ddaf22167f service nova] Acquired lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1466.339073] env[68285]: DEBUG nova.network.neutron [req-b69b218e-5d25-4e54-8a6b-598df820d703 req-cfdb28ca-3799-4aa9-bfca-22ddaf22167f service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Refreshing network info cache for port b24692ad-b6df-4cc6-937d-61afea866aad {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1466.340928] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647f1f92-b836-4cf5-885f-4f26ed88150d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.358587] env[68285]: DEBUG nova.virt.hardware [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and 
image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1466.358794] env[68285]: DEBUG nova.virt.hardware [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1466.358982] env[68285]: DEBUG nova.virt.hardware [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1466.359188] env[68285]: DEBUG nova.virt.hardware [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1466.359336] env[68285]: DEBUG nova.virt.hardware [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1466.359483] env[68285]: DEBUG nova.virt.hardware [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1466.359686] env[68285]: DEBUG nova.virt.hardware [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1466.359845] env[68285]: DEBUG nova.virt.hardware [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1466.360025] env[68285]: DEBUG nova.virt.hardware [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1466.360195] env[68285]: DEBUG nova.virt.hardware [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1466.360365] env[68285]: DEBUG nova.virt.hardware [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Sorted 
desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1466.366526] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Reconfiguring VM to attach interface {{(pid=68285) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1466.367291] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ed8717e-a165-4584-bc45-1e334c5f4d55 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.384570] env[68285]: DEBUG oslo_vmware.api [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1466.384570] env[68285]: value = "task-2892755" [ 1466.384570] env[68285]: _type = "Task" [ 1466.384570] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.392320] env[68285]: DEBUG oslo_vmware.api [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892755, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.894847] env[68285]: DEBUG oslo_vmware.api [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892755, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.034529] env[68285]: DEBUG nova.network.neutron [req-b69b218e-5d25-4e54-8a6b-598df820d703 req-cfdb28ca-3799-4aa9-bfca-22ddaf22167f service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updated VIF entry in instance network info cache for port b24692ad-b6df-4cc6-937d-61afea866aad. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1467.034970] env[68285]: DEBUG nova.network.neutron [req-b69b218e-5d25-4e54-8a6b-598df820d703 req-cfdb28ca-3799-4aa9-bfca-22ddaf22167f service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updating instance_info_cache with network_info: [{"id": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "address": "fa:16:3e:32:ee:cc", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap379dbcb8-f7", "ovs_interfaceid": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b24692ad-b6df-4cc6-937d-61afea866aad", "address": "fa:16:3e:87:31:e1", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb24692ad-b6", "ovs_interfaceid": "b24692ad-b6df-4cc6-937d-61afea866aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.396535] env[68285]: DEBUG oslo_vmware.api [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892755, 'name': ReconfigVM_Task, 'duration_secs': 0.515682} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.397019] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1467.397238] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Reconfigured VM to attach interface {{(pid=68285) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1467.538067] env[68285]: DEBUG oslo_concurrency.lockutils [req-b69b218e-5d25-4e54-8a6b-598df820d703 req-cfdb28ca-3799-4aa9-bfca-22ddaf22167f service nova] Releasing lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1467.901804] env[68285]: DEBUG oslo_concurrency.lockutils [None req-7a059733-af95-4f98-94a6-1a17a103efed tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-005f8c9a-8327-4c60-a016-0460ca42f65f-b24692ad-b6df-4cc6-937d-61afea866aad" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 5.935s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1468.223178] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1468.223853] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1468.223955] env[68285]: DEBUG nova.objects.instance [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lazy-loading 'pci_requests' on Instance uuid f9d35416-1f7f-4bf5-baba-1ce4e7436341 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1468.727580] env[68285]: DEBUG nova.objects.instance [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lazy-loading 'numa_topology' on Instance uuid f9d35416-1f7f-4bf5-baba-1ce4e7436341 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1469.150275] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock 
"interface-005f8c9a-8327-4c60-a016-0460ca42f65f-b24692ad-b6df-4cc6-937d-61afea866aad" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1469.150682] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-005f8c9a-8327-4c60-a016-0460ca42f65f-b24692ad-b6df-4cc6-937d-61afea866aad" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1469.231029] env[68285]: INFO nova.compute.claims [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1469.310110] env[68285]: DEBUG nova.compute.manager [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Stashing vm_state: active {{(pid=68285) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1469.654394] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.654394] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1469.654584] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9206e7b-48c2-4388-8c62-16c7ea78ec51 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.673373] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4349e03f-f9ea-46b1-9916-50c00e4805c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.697897] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Reconfiguring VM to detach interface {{(pid=68285) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1469.698141] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b285b3c-aec5-464f-9a13-3756cdf2b313 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.715750] env[68285]: DEBUG oslo_vmware.api [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 
tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1469.715750] env[68285]: value = "task-2892756" [ 1469.715750] env[68285]: _type = "Task" [ 1469.715750] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.724861] env[68285]: DEBUG oslo_vmware.api [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892756, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.828038] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1470.225252] env[68285]: DEBUG oslo_vmware.api [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.300714] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b23762-1f0c-47d3-9651-02f56dd88313 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.307863] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc9d3f4-c2dc-4f76-b69c-b0c7435fecd8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.336380] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b90e79-c59e-406e-bd8c-5efe38b019bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.343112] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eac2a51-4035-4279-b0a8-e9ff0a431e10 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.357122] env[68285]: DEBUG nova.compute.provider_tree [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1470.725386] env[68285]: DEBUG oslo_vmware.api [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892756, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.860134] env[68285]: DEBUG nova.scheduler.client.report [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1471.227433] env[68285]: DEBUG oslo_vmware.api [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.364685] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.141s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1471.366893] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.539s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1471.394438] env[68285]: INFO nova.network.neutron [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updating port 4cd7aff5-25ff-4491-b7b0-a079248d54f4 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1471.726989] env[68285]: DEBUG oslo_vmware.api [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.871922] env[68285]: INFO nova.compute.claims [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1472.227323] env[68285]: DEBUG oslo_vmware.api [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892756, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.378320] env[68285]: INFO nova.compute.resource_tracker [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating resource usage from migration 50c3dd7e-0bfd-459a-9039-5e01f5031633 [ 1472.442892] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9613486-1691-4bda-b3b4-70ca71aa188b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.450165] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0423a72-32d8-4222-8a06-cae2c00f676b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.480238] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b54a8b8-da2d-4908-8c26-d12391784478 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.486714] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d591c6f0-3ff5-45cc-8d48-70cb3f47b458 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.499042] env[68285]: DEBUG nova.compute.provider_tree [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1472.727909] env[68285]: DEBUG oslo_vmware.api [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892756, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.860901] env[68285]: DEBUG nova.compute.manager [req-bde70078-3be2-408d-bf65-aaea02337717 req-0fa701e7-2585-48da-a5ce-726e4d6722ff service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Received event network-vif-plugged-4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1472.861127] env[68285]: DEBUG oslo_concurrency.lockutils [req-bde70078-3be2-408d-bf65-aaea02337717 req-0fa701e7-2585-48da-a5ce-726e4d6722ff service nova] Acquiring lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1472.861293] env[68285]: DEBUG oslo_concurrency.lockutils [req-bde70078-3be2-408d-bf65-aaea02337717 req-0fa701e7-2585-48da-a5ce-726e4d6722ff service nova] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1472.861462] env[68285]: DEBUG oslo_concurrency.lockutils [req-bde70078-3be2-408d-bf65-aaea02337717 req-0fa701e7-2585-48da-a5ce-726e4d6722ff service nova] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1472.861627] env[68285]: DEBUG nova.compute.manager [req-bde70078-3be2-408d-bf65-aaea02337717 req-0fa701e7-2585-48da-a5ce-726e4d6722ff service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] No waiting events found dispatching network-vif-plugged-4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1472.861788] env[68285]: WARNING nova.compute.manager [req-bde70078-3be2-408d-bf65-aaea02337717 req-0fa701e7-2585-48da-a5ce-726e4d6722ff service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Received unexpected event network-vif-plugged-4cd7aff5-25ff-4491-b7b0-a079248d54f4 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1472.943373] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1472.943620] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1472.943707] env[68285]: DEBUG nova.network.neutron [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1473.001793] env[68285]: DEBUG nova.scheduler.client.report [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1473.228622] env[68285]: DEBUG oslo_vmware.api [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892756, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.505734] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.139s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1473.506018] env[68285]: INFO nova.compute.manager [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Migrating [ 1473.676111] env[68285]: DEBUG nova.network.neutron [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updating instance_info_cache with network_info: [{"id": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "address": "fa:16:3e:6b:0f:c0", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cd7aff5-25", "ovs_interfaceid": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1473.728873] env[68285]: DEBUG oslo_vmware.api [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892756, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.020599] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.020775] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1474.020959] env[68285]: DEBUG nova.network.neutron [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1474.179507] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1474.207317] env[68285]: DEBUG nova.virt.hardware [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='edc31a929bf89c77769fa013a4506cec',container_format='bare',created_at=2025-03-10T16:02:00Z,direct_url=,disk_format='vmdk',id=e4ec1404-d856-414d-bbe3-4d0fdba0b312,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1769274936-shelved',owner='c272180eed81480fabd7e6d4dacc2613',properties=ImageMetaProps,protected=,size=31670272,status='active',tags=,updated_at=2025-03-10T16:02:14Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1474.207580] env[68285]: DEBUG nova.virt.hardware [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1474.207739] env[68285]: DEBUG nova.virt.hardware [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1474.207924] env[68285]: DEBUG nova.virt.hardware [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 
tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1474.208088] env[68285]: DEBUG nova.virt.hardware [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1474.208239] env[68285]: DEBUG nova.virt.hardware [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1474.208445] env[68285]: DEBUG nova.virt.hardware [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1474.208607] env[68285]: DEBUG nova.virt.hardware [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1474.208775] env[68285]: DEBUG nova.virt.hardware [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1474.208981] env[68285]: DEBUG nova.virt.hardware [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1474.209183] env[68285]: DEBUG nova.virt.hardware [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1474.210033] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f41306-9a1b-4bb4-8f0a-bd7692e3214a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.218492] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb1e006-d6f7-4758-90a8-ab2a9ffe6746 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.237562] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:6b:0f:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '82ca17df-257e-40e6-9ec9-310ed6f05ccb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4cd7aff5-25ff-4491-b7b0-a079248d54f4', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1474.245009] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1474.245259] env[68285]: DEBUG oslo_vmware.api [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.245471] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1474.245669] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cf32f3f-5196-4f9b-b00b-66f1942199cb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.263833] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1474.263833] env[68285]: value = "task-2892757" [ 1474.263833] env[68285]: _type = "Task" [ 1474.263833] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.270823] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892757, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.716393] env[68285]: DEBUG nova.network.neutron [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance_info_cache with network_info: [{"id": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "address": "fa:16:3e:56:de:67", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7b1a5b-57", "ovs_interfaceid": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.731927] env[68285]: DEBUG oslo_vmware.api [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.772943] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892757, 'name': CreateVM_Task, 'duration_secs': 0.305135} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.773626] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1474.774081] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e4ec1404-d856-414d-bbe3-4d0fdba0b312" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.774264] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e4ec1404-d856-414d-bbe3-4d0fdba0b312" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1474.774647] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e4ec1404-d856-414d-bbe3-4d0fdba0b312" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1474.774902] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf3ef2f4-54a0-40d5-a937-853185a147a5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.778970] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1474.778970] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e70ffe-1be8-9269-8f11-d8680743ffec" [ 1474.778970] env[68285]: _type = "Task" [ 1474.778970] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.786033] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e70ffe-1be8-9269-8f11-d8680743ffec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.884781] env[68285]: DEBUG nova.compute.manager [req-6d618980-df7e-44b4-b2bf-be4dba2e5e8c req-ae611969-6e25-471c-899a-3b544865bd4e service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Received event network-changed-4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1474.885018] env[68285]: DEBUG nova.compute.manager [req-6d618980-df7e-44b4-b2bf-be4dba2e5e8c req-ae611969-6e25-471c-899a-3b544865bd4e service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Refreshing instance network info cache due to event network-changed-4cd7aff5-25ff-4491-b7b0-a079248d54f4. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1474.885248] env[68285]: DEBUG oslo_concurrency.lockutils [req-6d618980-df7e-44b4-b2bf-be4dba2e5e8c req-ae611969-6e25-471c-899a-3b544865bd4e service nova] Acquiring lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.885393] env[68285]: DEBUG oslo_concurrency.lockutils [req-6d618980-df7e-44b4-b2bf-be4dba2e5e8c req-ae611969-6e25-471c-899a-3b544865bd4e service nova] Acquired lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1474.885555] env[68285]: DEBUG nova.network.neutron [req-6d618980-df7e-44b4-b2bf-be4dba2e5e8c req-ae611969-6e25-471c-899a-3b544865bd4e service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Refreshing network info cache for port 4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1475.219542] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1475.232206] env[68285]: DEBUG oslo_vmware.api [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892756, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.287951] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e4ec1404-d856-414d-bbe3-4d0fdba0b312" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1475.288212] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Processing image e4ec1404-d856-414d-bbe3-4d0fdba0b312 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1475.288445] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e4ec1404-d856-414d-bbe3-4d0fdba0b312/e4ec1404-d856-414d-bbe3-4d0fdba0b312.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.288592] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e4ec1404-d856-414d-bbe3-4d0fdba0b312/e4ec1404-d856-414d-bbe3-4d0fdba0b312.vmdk" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1475.288771] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1475.289014] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7892664-51d5-43d9-973c-5b792f74891d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.297153] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1475.297362] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1475.298047] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0252a9ff-4a66-469a-93c8-d2395a4fbbee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.302562] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1475.302562] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52500c49-7c8b-3f96-7cd8-920f5efb8db6" [ 1475.302562] env[68285]: _type = "Task" [ 1475.302562] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.313877] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52500c49-7c8b-3f96-7cd8-920f5efb8db6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.457321] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.457581] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.457748] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.457896] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.578691] env[68285]: DEBUG nova.network.neutron [req-6d618980-df7e-44b4-b2bf-be4dba2e5e8c req-ae611969-6e25-471c-899a-3b544865bd4e service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updated VIF entry in instance network info cache for port 4cd7aff5-25ff-4491-b7b0-a079248d54f4. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1475.579056] env[68285]: DEBUG nova.network.neutron [req-6d618980-df7e-44b4-b2bf-be4dba2e5e8c req-ae611969-6e25-471c-899a-3b544865bd4e service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updating instance_info_cache with network_info: [{"id": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "address": "fa:16:3e:6b:0f:c0", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cd7aff5-25", "ovs_interfaceid": "4cd7aff5-25ff-4491-b7b0-a079248d54f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.735485] env[68285]: DEBUG oslo_vmware.api [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892756, 'name': ReconfigVM_Task, 'duration_secs': 5.738614} 
completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.735485] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1475.735485] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Reconfigured VM to detach interface {{(pid=68285) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1475.812464] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Preparing fetch location {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1475.812722] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Fetch image to [datastore1] OSTACK_IMG_38d9e935-64b0-425f-a1b5-cf17c86db259/OSTACK_IMG_38d9e935-64b0-425f-a1b5-cf17c86db259.vmdk {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1475.812903] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Downloading stream optimized image e4ec1404-d856-414d-bbe3-4d0fdba0b312 to [datastore1] OSTACK_IMG_38d9e935-64b0-425f-a1b5-cf17c86db259/OSTACK_IMG_38d9e935-64b0-425f-a1b5-cf17c86db259.vmdk on the data store datastore1 as vApp {{(pid=68285) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1475.813083] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Downloading image file data e4ec1404-d856-414d-bbe3-4d0fdba0b312 to the ESX as VM named 'OSTACK_IMG_38d9e935-64b0-425f-a1b5-cf17c86db259' {{(pid=68285) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1475.866666] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.883034] env[68285]: DEBUG oslo_vmware.rw_handles [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1475.883034] env[68285]: value = "resgroup-9" [ 1475.883034] env[68285]: _type = "ResourcePool" [ 1475.883034] env[68285]: }. 
{{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1475.883308] env[68285]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-cc893d02-287d-4cfd-a421-52937b81d56a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.904264] env[68285]: DEBUG oslo_vmware.rw_handles [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lease: (returnval){ [ 1475.904264] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52fd74e7-6d0f-6b7d-1bd9-0404b94ffcc1" [ 1475.904264] env[68285]: _type = "HttpNfcLease" [ 1475.904264] env[68285]: } obtained for vApp import into resource pool (val){ [ 1475.904264] env[68285]: value = "resgroup-9" [ 1475.904264] env[68285]: _type = "ResourcePool" [ 1475.904264] env[68285]: }. {{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1475.904607] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the lease: (returnval){ [ 1475.904607] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52fd74e7-6d0f-6b7d-1bd9-0404b94ffcc1" [ 1475.904607] env[68285]: _type = "HttpNfcLease" [ 1475.904607] env[68285]: } to be ready. {{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1475.911677] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1475.911677] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52fd74e7-6d0f-6b7d-1bd9-0404b94ffcc1" [ 1475.911677] env[68285]: _type = "HttpNfcLease" [ 1475.911677] env[68285]: } is initializing. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1476.082198] env[68285]: DEBUG oslo_concurrency.lockutils [req-6d618980-df7e-44b4-b2bf-be4dba2e5e8c req-ae611969-6e25-471c-899a-3b544865bd4e service nova] Releasing lock "refresh_cache-f9d35416-1f7f-4bf5-baba-1ce4e7436341" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1476.413645] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1476.413645] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52fd74e7-6d0f-6b7d-1bd9-0404b94ffcc1" [ 1476.413645] env[68285]: _type = "HttpNfcLease" [ 1476.413645] env[68285]: } is initializing. 
{{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1476.733500] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190fa6d8-4c6d-4a86-86ab-823a869e91c8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.752407] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance 'a8a67f90-047d-49ce-8de0-ee3e17998c6b' progress to 0 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1476.866373] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1476.866613] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1476.912915] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1476.912915] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52fd74e7-6d0f-6b7d-1bd9-0404b94ffcc1" [ 1476.912915] env[68285]: _type = "HttpNfcLease" [ 1476.912915] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1476.913261] env[68285]: DEBUG oslo_vmware.rw_handles [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1476.913261] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52fd74e7-6d0f-6b7d-1bd9-0404b94ffcc1" [ 1476.913261] env[68285]: _type = "HttpNfcLease" [ 1476.913261] env[68285]: }. {{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1476.913925] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218184e5-7225-43fc-aa96-ad261d9873ac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.922450] env[68285]: DEBUG oslo_vmware.rw_handles [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a3b176-5d63-ca32-4a04-f872a1afbaee/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1476.922626] env[68285]: DEBUG oslo_vmware.rw_handles [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Creating HTTP connection to write to file with size = 31670272 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a3b176-5d63-ca32-4a04-f872a1afbaee/disk-0.vmdk. 
{{(pid=68285) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1476.986064] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-31f46a0a-4104-454a-b824-2a669a66348d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.107461] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.107688] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquired lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1477.107918] env[68285]: DEBUG nova.network.neutron [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1477.258529] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1477.258820] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38d8bfc4-4865-4d19-967b-54aaa8c78b85 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.267155] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1477.267155] env[68285]: value = "task-2892759" [ 1477.267155] env[68285]: _type = "Task" [ 1477.267155] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.277826] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892759, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.369331] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1477.369666] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1477.369876] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1477.370075] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68285) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1477.370970] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9d7659-f32b-4e12-8f68-ee2dc1764995 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.379024] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce9101e-27f9-4d12-9a0a-bf70f6828595 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.393810] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e121348b-a105-41c9-a023-8b752bc95604 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.401674] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e7ddc5-f1e1-47fa-82df-2d737ff95212 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.434099] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180627MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=68285) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1477.434336] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1477.434596] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1477.522767] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "005f8c9a-8327-4c60-a016-0460ca42f65f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1477.523062] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "005f8c9a-8327-4c60-a016-0460ca42f65f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1477.523285] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "005f8c9a-8327-4c60-a016-0460ca42f65f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1477.523472] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "005f8c9a-8327-4c60-a016-0460ca42f65f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1477.523639] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "005f8c9a-8327-4c60-a016-0460ca42f65f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1477.525841] env[68285]: INFO nova.compute.manager [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Terminating instance [ 1477.780241] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892759, 'name': PowerOffVM_Task, 'duration_secs': 0.194884} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.782739] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1477.782934] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance 'a8a67f90-047d-49ce-8de0-ee3e17998c6b' progress to 17 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1477.838922] env[68285]: INFO nova.network.neutron [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Port b24692ad-b6df-4cc6-937d-61afea866aad from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1477.839300] env[68285]: DEBUG nova.network.neutron [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updating instance_info_cache with network_info: [{"id": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "address": "fa:16:3e:32:ee:cc", "network": {"id": "c76ebf7e-b7e9-4bfd-92d4-c39ee821cd3d", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1043653527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75a6837bced940cdaf5743b8e94cce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap379dbcb8-f7", "ovs_interfaceid": "379dbcb8-f7be-4c47-87de-5f6c87635d90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.026555] env[68285]: DEBUG oslo_vmware.rw_handles [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Completed reading data from the image iterator. 
{{(pid=68285) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1478.026755] env[68285]: DEBUG oslo_vmware.rw_handles [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a3b176-5d63-ca32-4a04-f872a1afbaee/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1478.027642] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6668fd4-50c6-4316-8239-0f56b44d28a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.031078] env[68285]: DEBUG nova.compute.manager [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1478.031311] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1478.032159] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4606552e-6bdf-447a-9208-80dc9f99f236 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.038677] env[68285]: DEBUG oslo_vmware.rw_handles [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a3b176-5d63-ca32-4a04-f872a1afbaee/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1478.038849] env[68285]: DEBUG oslo_vmware.rw_handles [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a3b176-5d63-ca32-4a04-f872a1afbaee/disk-0.vmdk. 
{{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1478.040830] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-4beab376-5c3d-4d71-b4e1-3f8b2f12aacd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.042532] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1478.042532] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4450f144-43d7-483b-8b96-a0e6494af60a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.048861] env[68285]: DEBUG oslo_vmware.api [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1478.048861] env[68285]: value = "task-2892760" [ 1478.048861] env[68285]: _type = "Task" [ 1478.048861] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.056361] env[68285]: DEBUG oslo_vmware.api [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892760, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.290221] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1478.290525] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1478.290797] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1478.290941] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 
tempest-ServerActionsTestJSON-223452269-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1478.291233] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1478.291422] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1478.291629] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1478.291790] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1478.292074] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1478.292256] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1478.292461] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1478.297718] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cb70f30-63d6-48aa-8b34-35a188cdc017 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.309635] env[68285]: DEBUG oslo_vmware.rw_handles [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a3b176-5d63-ca32-4a04-f872a1afbaee/disk-0.vmdk. 
{{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1478.309846] env[68285]: INFO nova.virt.vmwareapi.images [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Downloaded image file data e4ec1404-d856-414d-bbe3-4d0fdba0b312 [ 1478.311117] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8011470-9210-45fc-85cc-5d1d8c994e9a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.315440] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1478.315440] env[68285]: value = "task-2892761" [ 1478.315440] env[68285]: _type = "Task" [ 1478.315440] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.329849] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3fc064d7-5e6f-448a-aa07-68d7230843a5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.336754] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892761, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.344506] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Releasing lock "refresh_cache-005f8c9a-8327-4c60-a016-0460ca42f65f" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1478.377328] env[68285]: INFO nova.virt.vmwareapi.images [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] The imported VM was unregistered [ 1478.379939] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Caching image {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1478.380188] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Creating directory with path [datastore1] devstack-image-cache_base/e4ec1404-d856-414d-bbe3-4d0fdba0b312 {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1478.380465] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f35655a-4704-41d9-96f8-b9c983e7d732 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.407863] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Created directory with path [datastore1] devstack-image-cache_base/e4ec1404-d856-414d-bbe3-4d0fdba0b312 {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1478.408058] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_38d9e935-64b0-425f-a1b5-cf17c86db259/OSTACK_IMG_38d9e935-64b0-425f-a1b5-cf17c86db259.vmdk to [datastore1] devstack-image-cache_base/e4ec1404-d856-414d-bbe3-4d0fdba0b312/e4ec1404-d856-414d-bbe3-4d0fdba0b312.vmdk. {{(pid=68285) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1478.408352] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-999bf84a-e906-416d-b4f4-46474ed19810 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.415720] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1478.415720] env[68285]: value = "task-2892763" [ 1478.415720] env[68285]: _type = "Task" [ 1478.415720] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.423218] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892763, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.449293] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Applying migration context for instance a8a67f90-047d-49ce-8de0-ee3e17998c6b as it has an incoming, in-progress migration 50c3dd7e-0bfd-459a-9039-5e01f5031633. Migration status is migrating {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1478.450220] env[68285]: INFO nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating resource usage from migration 50c3dd7e-0bfd-459a-9039-5e01f5031633 [ 1478.468810] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1478.468958] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 005f8c9a-8327-4c60-a016-0460ca42f65f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1478.469102] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance f9d35416-1f7f-4bf5-baba-1ce4e7436341 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1478.469250] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Migration 50c3dd7e-0bfd-459a-9039-5e01f5031633 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1478.469380] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance a8a67f90-047d-49ce-8de0-ee3e17998c6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1478.469555] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1478.469687] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1478.542565] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8f4dba-9e08-44af-9704-fe902625141d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.549797] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7300322-f1fd-4d26-9ef2-4bbc64a7e9d5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.564335] env[68285]: DEBUG oslo_vmware.api [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892760, 'name': PowerOffVM_Task, 'duration_secs': 0.231153} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.589135] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1478.589344] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1478.589854] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8fab980-bb8d-4ab3-aa20-d6d8a5f34d89 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.592124] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4421a4-b34b-43ba-847e-5899797b9f26 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.599680] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6b4856-c4f6-4d67-ac23-7b135c80b73f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.613200] env[68285]: DEBUG nova.compute.provider_tree [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1478.661332] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1478.661576] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1478.661732] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Deleting the datastore file [datastore2] 005f8c9a-8327-4c60-a016-0460ca42f65f {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1478.662057] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2adbd559-b882-41e3-8a85-158c0f2dc0b5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.668358] env[68285]: DEBUG oslo_vmware.api [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: 
(returnval){ [ 1478.668358] env[68285]: value = "task-2892765" [ 1478.668358] env[68285]: _type = "Task" [ 1478.668358] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.676524] env[68285]: DEBUG oslo_vmware.api [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892765, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.837753] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892761, 'name': ReconfigVM_Task, 'duration_secs': 0.257202} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.838158] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance 'a8a67f90-047d-49ce-8de0-ee3e17998c6b' progress to 33 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1478.849083] env[68285]: DEBUG oslo_concurrency.lockutils [None req-37d9b64d-d1ee-4768-aead-c002179ab1ca tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "interface-005f8c9a-8327-4c60-a016-0460ca42f65f-b24692ad-b6df-4cc6-937d-61afea866aad" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.698s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1478.929182] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892763, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.116481] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1479.180556] env[68285]: DEBUG oslo_vmware.api [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892765, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.330523} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.180904] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1479.181013] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1479.181504] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1479.181504] env[68285]: INFO nova.compute.manager [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1479.181665] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1479.181905] env[68285]: DEBUG nova.compute.manager [-] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1479.182038] env[68285]: DEBUG nova.network.neutron [-] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1479.344505] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1479.344742] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1479.344888] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1479.345086] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1479.345274] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1479.345454] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1479.345668] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1479.345828] env[68285]: DEBUG nova.virt.hardware [None 
req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1479.345995] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1479.346239] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1479.346387] env[68285]: DEBUG nova.virt.hardware [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1479.352592] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Reconfiguring VM instance instance-0000007d to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1479.353160] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02315965-e5ad-4c4d-b9a7-ce8eab3f7ba3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.374088] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1479.374088] env[68285]: value = "task-2892766" [ 1479.374088] env[68285]: _type = "Task" [ 1479.374088] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.390371] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892766, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.429903] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892763, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.622057] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1479.622240] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.188s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1479.886994] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892766, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.928788] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892763, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.298202] env[68285]: DEBUG nova.compute.manager [req-e2f66f5d-0e79-4daa-9bca-2b5ede070355 req-68ffd550-ff18-41c3-a8eb-3d1694367f03 service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Received event network-vif-deleted-379dbcb8-f7be-4c47-87de-5f6c87635d90 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1480.298202] env[68285]: INFO nova.compute.manager [req-e2f66f5d-0e79-4daa-9bca-2b5ede070355 req-68ffd550-ff18-41c3-a8eb-3d1694367f03 service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Neutron deleted interface 379dbcb8-f7be-4c47-87de-5f6c87635d90; detaching it from the instance and deleting it from the info cache [ 1480.298575] env[68285]: DEBUG nova.network.neutron [req-e2f66f5d-0e79-4daa-9bca-2b5ede070355 req-68ffd550-ff18-41c3-a8eb-3d1694367f03 service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.389359] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892766, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.429476] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892763, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.622414] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1480.622602] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68285) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1480.781452] env[68285]: DEBUG nova.network.neutron [-] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.800995] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc2d8b3f-951e-48e5-b865-b74c47bb3a42 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.810564] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c80d11e0-d87f-4712-b678-96a5ec192819 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.836415] env[68285]: DEBUG nova.compute.manager [req-e2f66f5d-0e79-4daa-9bca-2b5ede070355 req-68ffd550-ff18-41c3-a8eb-3d1694367f03 service nova] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Detach interface failed, port_id=379dbcb8-f7be-4c47-87de-5f6c87635d90, reason: Instance 005f8c9a-8327-4c60-a016-0460ca42f65f could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1480.886476] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892766, 'name': ReconfigVM_Task, 'duration_secs': 1.23983} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.886770] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Reconfigured VM instance instance-0000007d to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1480.887646] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126f1577-1739-45b7-9190-9a9a93d3b053 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.909528] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] a8a67f90-047d-49ce-8de0-ee3e17998c6b/a8a67f90-047d-49ce-8de0-ee3e17998c6b.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1480.909797] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b89af98a-78dd-4419-914d-2438ca0ac767 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.930431] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892763, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.343408} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.931543] env[68285]: INFO nova.virt.vmwareapi.ds_util [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_38d9e935-64b0-425f-a1b5-cf17c86db259/OSTACK_IMG_38d9e935-64b0-425f-a1b5-cf17c86db259.vmdk to [datastore1] devstack-image-cache_base/e4ec1404-d856-414d-bbe3-4d0fdba0b312/e4ec1404-d856-414d-bbe3-4d0fdba0b312.vmdk. 
[ 1480.931737] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Cleaning up location [datastore1] OSTACK_IMG_38d9e935-64b0-425f-a1b5-cf17c86db259 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1480.931913] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_38d9e935-64b0-425f-a1b5-cf17c86db259 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1480.932226] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1480.932226] env[68285]: value = "task-2892767" [ 1480.932226] env[68285]: _type = "Task" [ 1480.932226] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.932413] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b3a7e59-02e2-4fd2-803c-d42ca7059b9a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.944326] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892767, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.945494] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1480.945494] env[68285]: value = "task-2892768" [ 1480.945494] env[68285]: _type = "Task" [ 1480.945494] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.952647] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892768, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.284463] env[68285]: INFO nova.compute.manager [-] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Took 2.10 seconds to deallocate network for instance. [ 1481.443274] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892767, 'name': ReconfigVM_Task, 'duration_secs': 0.269867} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.443544] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Reconfigured VM instance instance-0000007d to attach disk [datastore2] a8a67f90-047d-49ce-8de0-ee3e17998c6b/a8a67f90-047d-49ce-8de0-ee3e17998c6b.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1481.443800] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance 'a8a67f90-047d-49ce-8de0-ee3e17998c6b' progress to 50 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1481.455023] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892768, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034795} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.455270] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1481.455437] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e4ec1404-d856-414d-bbe3-4d0fdba0b312/e4ec1404-d856-414d-bbe3-4d0fdba0b312.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1481.455665] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e4ec1404-d856-414d-bbe3-4d0fdba0b312/e4ec1404-d856-414d-bbe3-4d0fdba0b312.vmdk to [datastore1] f9d35416-1f7f-4bf5-baba-1ce4e7436341/f9d35416-1f7f-4bf5-baba-1ce4e7436341.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1481.455902] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa52cf1c-81ec-4f6c-97da-eca9a3d60e62 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.461436] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1481.461436] env[68285]: value = "task-2892769" [ 1481.461436] env[68285]: _type = "Task" [ 1481.461436] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.468536] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892769, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.791043] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1481.791329] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1481.791554] env[68285]: DEBUG nova.objects.instance [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'resources' on Instance uuid 005f8c9a-8327-4c60-a016-0460ca42f65f {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1481.952644] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5d54d0-a661-4835-909a-aa1d939716d3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.979422] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094c26fe-9d3d-40e2-916b-37f0f722957d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.997824] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance 'a8a67f90-047d-49ce-8de0-ee3e17998c6b' progress to 67 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1482.004385] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892769, 'name': CopyVirtualDisk_Task} progress is 24%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.384442] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22a00f3-170e-4d10-954c-b536f6371416 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.395811] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d601fed-4792-4bb9-8637-321647f3a6b4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.429953] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79fdee1-938f-4e7b-a956-e5f183ce95c9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.441359] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94d766d-f232-4096-b9c4-ad3e56f22e72 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.459657] env[68285]: DEBUG nova.compute.provider_tree [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1482.486781] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892769, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.542783] env[68285]: DEBUG nova.network.neutron [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Port 7c7b1a5b-57fa-4ae1-a454-3256e454042e binding to destination host cpu-1 is already ACTIVE {{(pid=68285) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1482.963055] env[68285]: DEBUG nova.scheduler.client.report [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1482.988040] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892769, 'name': CopyVirtualDisk_Task} progress is 66%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.468801] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.677s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1483.488354] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892769, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.489509] env[68285]: INFO nova.scheduler.client.report [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Deleted allocations for instance 005f8c9a-8327-4c60-a016-0460ca42f65f [ 1483.567867] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1483.568164] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1483.568331] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1483.987288] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892769, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.31963} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.987636] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e4ec1404-d856-414d-bbe3-4d0fdba0b312/e4ec1404-d856-414d-bbe3-4d0fdba0b312.vmdk to [datastore1] f9d35416-1f7f-4bf5-baba-1ce4e7436341/f9d35416-1f7f-4bf5-baba-1ce4e7436341.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1483.988229] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8f468f-4cec-44d2-bb24-d67d459f9f14 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.012686] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] f9d35416-1f7f-4bf5-baba-1ce4e7436341/f9d35416-1f7f-4bf5-baba-1ce4e7436341.vmdk or device None with type streamOptimized {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1484.013214] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8c75e99d-624f-44e2-bdc0-133ad8972032 tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "005f8c9a-8327-4c60-a016-0460ca42f65f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.490s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1484.014097] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-440da629-1c03-4a4c-8b28-f062d345489e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.033855] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1484.033855] env[68285]: value = "task-2892770" [ 1484.033855] env[68285]: _type = "Task" [ 1484.033855] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.041855] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892770, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.213360] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1484.213748] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1484.214090] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1484.214371] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1484.214620] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1484.216756] env[68285]: INFO nova.compute.manager [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Terminating instance [ 1484.543651] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892770, 'name': ReconfigVM_Task, 'duration_secs': 0.279956} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.543922] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Reconfigured VM instance instance-00000079 to attach disk [datastore1] f9d35416-1f7f-4bf5-baba-1ce4e7436341/f9d35416-1f7f-4bf5-baba-1ce4e7436341.vmdk or device None with type streamOptimized {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1484.545085] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'size': 0, 'encryption_options': None, 'boot_index': 0, 'disk_bus': None, 'encryption_secret_uuid': None, 'encrypted': False, 'device_type': 'disk', 'guest_format': None, 'encryption_format': None, 'image_id': 'ce84ab4c-9913-42dc-b839-714ad2184867'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'boot_index': None, 'disk_bus': None, 'guest_format': None, 'device_type': None, 'attachment_id': '3e273bb1-cf7d-4668-96c7-0e4f00f64959', 'mount_device': '/dev/sdb', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581114', 'volume_id': '158231db-55db-48b6-a04f-63dcb2cdac56', 'name': 'volume-158231db-55db-48b6-a04f-63dcb2cdac56', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'f9d35416-1f7f-4bf5-baba-1ce4e7436341', 'attached_at': '', 'detached_at': '', 'volume_id': '158231db-55db-48b6-a04f-63dcb2cdac56', 'serial': '158231db-55db-48b6-a04f-63dcb2cdac56'}, 'volume_type': None}], 'swap': None} {{(pid=68285) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1484.545294] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Volume attach. 
Driver type: vmdk {{(pid=68285) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1484.545484] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581114', 'volume_id': '158231db-55db-48b6-a04f-63dcb2cdac56', 'name': 'volume-158231db-55db-48b6-a04f-63dcb2cdac56', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'f9d35416-1f7f-4bf5-baba-1ce4e7436341', 'attached_at': '', 'detached_at': '', 'volume_id': '158231db-55db-48b6-a04f-63dcb2cdac56', 'serial': '158231db-55db-48b6-a04f-63dcb2cdac56'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1484.546262] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c299720d-2a16-4f02-b3d8-09d9f8494021 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.561280] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a74237e-a3f7-4e8b-8de4-aa6c8816510a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.587966] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] volume-158231db-55db-48b6-a04f-63dcb2cdac56/volume-158231db-55db-48b6-a04f-63dcb2cdac56.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1484.588218] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d61f19bd-e1ef-4d51-b67d-20330ae8271f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.606082] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1484.606082] env[68285]: value = "task-2892771" [ 1484.606082] env[68285]: _type = "Task" [ 1484.606082] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.613427] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892771, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.629662] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.629836] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1484.630023] env[68285]: DEBUG nova.network.neutron [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1484.720727] env[68285]: DEBUG nova.compute.manager [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1484.720954] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1484.721853] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f314bb-897e-46b0-98cd-f88088c05514 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.729564] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1484.729782] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab2e3f9c-3288-4902-a55b-c5d0cb74c26e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.736584] env[68285]: DEBUG oslo_vmware.api [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1484.736584] env[68285]: value = "task-2892772" [ 1484.736584] env[68285]: _type = "Task" [ 1484.736584] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.744199] env[68285]: DEBUG oslo_vmware.api [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892772, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.117096] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892771, 'name': ReconfigVM_Task, 'duration_secs': 0.291498} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.117096] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Reconfigured VM instance instance-00000079 to attach disk [datastore1] volume-158231db-55db-48b6-a04f-63dcb2cdac56/volume-158231db-55db-48b6-a04f-63dcb2cdac56.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1485.121162] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1e325b0-d317-4978-9457-977291c9b074 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.135495] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1485.135495] env[68285]: value = "task-2892773" [ 1485.135495] env[68285]: _type = "Task" [ 1485.135495] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.142964] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892773, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.245922] env[68285]: DEBUG oslo_vmware.api [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892772, 'name': PowerOffVM_Task, 'duration_secs': 0.20543} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.248342] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1485.248516] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1485.248787] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e895a6c3-1f99-409d-8810-cd66aa2cdb8e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.317974] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1485.318221] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1485.318407] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Deleting the datastore file [datastore1] f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1485.318660] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1ed0621-eda4-4060-9a54-336fe51a8a44 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.324580] env[68285]: DEBUG oslo_vmware.api [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for the task: (returnval){ [ 1485.324580] env[68285]: value = "task-2892775" [ 1485.324580] env[68285]: _type = "Task" [ 1485.324580] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.331673] env[68285]: DEBUG oslo_vmware.api [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892775, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.336076] env[68285]: DEBUG nova.network.neutron [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance_info_cache with network_info: [{"id": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "address": "fa:16:3e:56:de:67", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7b1a5b-57", "ovs_interfaceid": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.645888] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892773, 'name': ReconfigVM_Task, 'duration_secs': 0.151949} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.646199] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581114', 'volume_id': '158231db-55db-48b6-a04f-63dcb2cdac56', 'name': 'volume-158231db-55db-48b6-a04f-63dcb2cdac56', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'f9d35416-1f7f-4bf5-baba-1ce4e7436341', 'attached_at': '', 'detached_at': '', 'volume_id': '158231db-55db-48b6-a04f-63dcb2cdac56', 'serial': '158231db-55db-48b6-a04f-63dcb2cdac56'} {{(pid=68285) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1485.646800] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03ad016e-edb0-4f65-9b71-05a5b68ce5c5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.652888] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1485.652888] env[68285]: value = "task-2892776" [ 1485.652888] env[68285]: _type = "Task" [ 1485.652888] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.660238] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892776, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.834543] env[68285]: DEBUG oslo_vmware.api [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Task: {'id': task-2892775, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142288} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.834795] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1485.834981] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1485.835190] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1485.835362] env[68285]: INFO nova.compute.manager [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1485.835601] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1485.835791] env[68285]: DEBUG nova.compute.manager [-] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1485.835887] env[68285]: DEBUG nova.network.neutron [-] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1485.839113] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1486.111250] env[68285]: DEBUG nova.compute.manager [req-2c9cb53d-4a8b-46ae-a9fe-dc813d3c27a4 req-2e2df08f-bd5a-4f4f-9a68-1d5747f97f10 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Received event network-vif-deleted-6228c66e-e1b1-4b17-bdb2-ae945380a77a {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1486.111461] env[68285]: INFO nova.compute.manager [req-2c9cb53d-4a8b-46ae-a9fe-dc813d3c27a4 req-2e2df08f-bd5a-4f4f-9a68-1d5747f97f10 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Neutron deleted interface 6228c66e-e1b1-4b17-bdb2-ae945380a77a; detaching it from the instance and deleting it from the info cache [ 1486.111656] env[68285]: DEBUG nova.network.neutron [req-2c9cb53d-4a8b-46ae-a9fe-dc813d3c27a4 req-2e2df08f-bd5a-4f4f-9a68-1d5747f97f10 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.163028] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892776, 'name': Rename_Task, 'duration_secs': 0.139633} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.163340] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1486.163530] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dad3411d-6f07-4b32-8cb7-78ad9aa0c48f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.169629] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1486.169629] env[68285]: value = "task-2892777" [ 1486.169629] env[68285]: _type = "Task" [ 1486.169629] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.176895] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892777, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.364139] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4ab749-b762-4919-9558-e27703bcdeb3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.383805] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160a7b5e-9d76-4a29-8881-f873879ab8bf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.391151] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance 'a8a67f90-047d-49ce-8de0-ee3e17998c6b' progress to 83 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1486.596077] env[68285]: DEBUG nova.network.neutron [-] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.614587] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1c4ef618-ca3e-41d3-a067-afa75066a343 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.623671] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70161041-ce47-4bfd-9b6b-a88203556c2c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.648323] env[68285]: DEBUG nova.compute.manager [req-2c9cb53d-4a8b-46ae-a9fe-dc813d3c27a4 req-2e2df08f-bd5a-4f4f-9a68-1d5747f97f10 service nova] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Detach interface failed, port_id=6228c66e-e1b1-4b17-bdb2-ae945380a77a, reason: Instance f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1486.677979] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892777, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.902617] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1486.902916] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56b5315c-cc18-4ef9-89a1-e7dae23c9b93 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.910333] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1486.910333] env[68285]: value = "task-2892778" [ 1486.910333] env[68285]: _type = "Task" [ 1486.910333] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.917507] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892778, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.098502] env[68285]: INFO nova.compute.manager [-] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Took 1.26 seconds to deallocate network for instance. [ 1487.178992] env[68285]: DEBUG oslo_vmware.api [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892777, 'name': PowerOnVM_Task, 'duration_secs': 0.54597} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.179371] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1487.278741] env[68285]: DEBUG nova.compute.manager [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1487.279820] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5264ac3f-8db4-40a2-a714-160172405c06 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.419967] env[68285]: DEBUG oslo_vmware.api [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892778, 'name': PowerOnVM_Task, 'duration_secs': 0.411486} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.420244] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1487.420428] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-0746d456-2f0f-4680-813a-cb924af52179 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance 'a8a67f90-047d-49ce-8de0-ee3e17998c6b' progress to 100 {{(pid=68285) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1487.605637] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1487.605910] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1487.606148] env[68285]: DEBUG nova.objects.instance [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lazy-loading 'resources' on Instance uuid f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1487.796638] env[68285]: DEBUG oslo_concurrency.lockutils [None req-0b5b624a-bae5-4852-9dce-c2525820fa60 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 26.805s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1488.167076] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc0491e-ef86-434b-9725-916866dd7e83 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.175022] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9bd478-b9a7-4f58-b3d8-8535e6202228 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.205036] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4021965-5101-4e48-aca5-6632c551d0bf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.211960] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d30213-5690-4a8a-8dd4-11beefefef11 {{(pid=68285) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.227382] env[68285]: DEBUG nova.compute.provider_tree [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.730200] env[68285]: DEBUG nova.scheduler.client.report [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1489.235163] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.629s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1489.253642] env[68285]: INFO nova.scheduler.client.report [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Deleted allocations for instance f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b [ 1489.761932] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d6c9c59e-e717-4260-9839-f3fad61d722c tempest-AttachInterfacesTestJSON-215364715 tempest-AttachInterfacesTestJSON-215364715-project-member] Lock "f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.548s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1490.121727] env[68285]: DEBUG nova.network.neutron [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Port 7c7b1a5b-57fa-4ae1-a454-3256e454042e binding to destination host cpu-1 is already ACTIVE {{(pid=68285) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1490.121999] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.122223] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1490.122420] env[68285]: DEBUG nova.network.neutron [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1491.009586] env[68285]: DEBUG nova.network.neutron [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance_info_cache with network_info: [{"id": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "address": "fa:16:3e:56:de:67", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7b1a5b-57", "ovs_interfaceid": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.515824] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1492.018759] env[68285]: DEBUG nova.compute.manager [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68285) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1492.019148] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1492.019271] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1492.524456] env[68285]: DEBUG nova.objects.instance [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lazy-loading 'migration_context' on Instance uuid a8a67f90-047d-49ce-8de0-ee3e17998c6b {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1493.083621] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234da217-dd37-415e-b6e2-bed977d13607 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.093102] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b614623-0caa-4b51-b22b-6778df727b29 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.124056] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d59d5d-ccc3-49d2-b61d-1328d02d01b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.131560] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84bc94df-5fac-434e-8d4a-ec49c7a9a998 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.145321] env[68285]: DEBUG nova.compute.provider_tree [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1493.651022] env[68285]: DEBUG nova.scheduler.client.report [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1494.662023] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.642s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1496.200219] env[68285]: INFO nova.compute.manager [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Swapping old allocation on 
dict_keys(['7bdf675d-15ae-4a4b-9c03-79d8c773b76b']) held by migration 50c3dd7e-0bfd-459a-9039-5e01f5031633 for instance [ 1496.223571] env[68285]: DEBUG nova.scheduler.client.report [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Overwriting current allocation {'allocations': {'7bdf675d-15ae-4a4b-9c03-79d8c773b76b': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 189}}, 'project_id': '43491d0bdffc49eaaad084f3124cffcb', 'user_id': '41e116b3ac9d4c7386847a5559ea313c', 'consumer_generation': 1} on consumer a8a67f90-047d-49ce-8de0-ee3e17998c6b {{(pid=68285) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1496.233234] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Acquiring lock "7cda8bbd-a75f-4a6a-8905-3f387fcbd624" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1496.233802] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Lock "7cda8bbd-a75f-4a6a-8905-3f387fcbd624" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1496.298848] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.298848] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1496.298848] env[68285]: DEBUG nova.network.neutron [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1496.738540] env[68285]: DEBUG nova.compute.manager [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Starting instance... 
{{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1496.999062] env[68285]: DEBUG nova.network.neutron [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance_info_cache with network_info: [{"id": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "address": "fa:16:3e:56:de:67", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7b1a5b-57", "ovs_interfaceid": "7c7b1a5b-57fa-4ae1-a454-3256e454042e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.258573] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1497.258837] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1497.260540] env[68285]: INFO nova.compute.claims [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1497.501983] env[68285]: DEBUG oslo_concurrency.lockutils [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-a8a67f90-047d-49ce-8de0-ee3e17998c6b" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1497.502509] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Powering off the VM {{(pid=68285) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1497.502802] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-084bf256-61a0-4dbc-b743-b46b3ae17f80 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.510021] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1497.510021] env[68285]: value = "task-2892779" [ 1497.510021] env[68285]: _type = "Task" [ 1497.510021] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.518105] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892779, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.020311] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892779, 'name': PowerOffVM_Task, 'duration_secs': 0.19932} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.020617] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1498.021325] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1498.021493] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1498.021649] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1498.021831] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 
tempest-ServerActionsTestJSON-223452269-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1498.021974] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1498.022160] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1498.022400] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1498.022563] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1498.022727] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1498.022887] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1498.023070] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1498.028089] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d86f9d4-c075-4de5-a1df-5c457e5f44a7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.043487] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1498.043487] env[68285]: value = "task-2892780" [ 1498.043487] env[68285]: _type = "Task" [ 1498.043487] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.051992] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892780, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.320187] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78295639-ef38-477a-9ec3-12c50da56046 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.327957] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784b6dff-b569-4930-8f9d-c9db8a1d0da3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.357080] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63af615d-264f-479b-a5bd-aa7d9c11c62a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.363841] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700a171c-e476-4d33-87a2-a33544186dc6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.376608] env[68285]: DEBUG nova.compute.provider_tree [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1498.553197] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892780, 'name': ReconfigVM_Task, 'duration_secs': 0.14278} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.554012] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa51cbe-0a2c-4bcd-921f-8c5508464e95 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.571355] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1498.571597] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1498.571754] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1498.571936] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1498.572130] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1498.572301] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1498.572505] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1498.572661] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1498.572824] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1498.572984] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1498.573179] env[68285]: DEBUG nova.virt.hardware [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1498.573927] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa6a0faf-5db9-4801-b56a-8a2d1b66979b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.579639] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1498.579639] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d50452-71f3-7fe7-0ed1-0c0f2ffcec84" [ 1498.579639] env[68285]: _type = "Task" [ 1498.579639] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.588631] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d50452-71f3-7fe7-0ed1-0c0f2ffcec84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.879668] env[68285]: DEBUG nova.scheduler.client.report [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1499.090352] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d50452-71f3-7fe7-0ed1-0c0f2ffcec84, 'name': SearchDatastore_Task, 'duration_secs': 0.008736} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.095544] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Reconfiguring VM instance instance-0000007d to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1499.095818] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92d2cdb9-771c-4d3e-bf89-2da084f2be2b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.113300] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1499.113300] env[68285]: value = "task-2892781" [ 1499.113300] env[68285]: _type = "Task" [ 1499.113300] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.120524] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892781, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.384929] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.126s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1499.385491] env[68285]: DEBUG nova.compute.manager [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1499.622872] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892781, 'name': ReconfigVM_Task, 'duration_secs': 0.189733} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.623163] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Reconfigured VM instance instance-0000007d to detach disk 2000 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1499.623920] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cddc1123-7157-46ae-81fd-c6a4022777c3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.644994] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] a8a67f90-047d-49ce-8de0-ee3e17998c6b/a8a67f90-047d-49ce-8de0-ee3e17998c6b.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1499.645218] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f399acd-0c8b-4dbe-bb0c-b47844b8cf5b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.662508] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1499.662508] env[68285]: value = "task-2892782" [ 1499.662508] env[68285]: _type = "Task" [ 1499.662508] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.669510] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892782, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.890790] env[68285]: DEBUG nova.compute.utils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1499.892123] env[68285]: DEBUG nova.compute.manager [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1499.892310] env[68285]: DEBUG nova.network.neutron [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1499.936171] env[68285]: DEBUG nova.policy [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1c352cb59eef4b098f77e4fa9eddfbf0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '632eb76477654f03aa243d38db04a430', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1500.167238] env[68285]: DEBUG nova.network.neutron [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Successfully created port: 3c86daf5-dd08-47db-a3c9-f356b57b59c3 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1500.174212] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892782, 'name': ReconfigVM_Task, 'duration_secs': 0.278704} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.174487] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Reconfigured VM instance instance-0000007d to attach disk [datastore2] a8a67f90-047d-49ce-8de0-ee3e17998c6b/a8a67f90-047d-49ce-8de0-ee3e17998c6b.vmdk or device None with type thin {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1500.175301] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150e761b-07f9-44cc-a949-5345db70dac9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.193396] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9d834b-fccf-4213-a18d-f2d9fe5ae2da {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.211677] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1895b800-24a0-4e75-bc69-9c43514cbb3d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.228525] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0452c3de-7d22-4015-8196-27ac2f88da9a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.234767] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1500.234983] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92f923f7-2487-4c04-8f7a-2b641ccebeb9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.240935] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1500.240935] env[68285]: value = "task-2892783" [ 1500.240935] env[68285]: _type = "Task" [ 1500.240935] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.247705] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892783, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.395353] env[68285]: DEBUG nova.compute.manager [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Start building block device mappings for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1500.750262] env[68285]: DEBUG oslo_vmware.api [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892783, 'name': PowerOnVM_Task, 'duration_secs': 0.356918} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.750540] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1501.404302] env[68285]: DEBUG nova.compute.manager [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Start spawning the instance on the hypervisor. {{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1501.431390] env[68285]: DEBUG nova.virt.hardware [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1501.431635] env[68285]: DEBUG nova.virt.hardware [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1501.431789] env[68285]: DEBUG nova.virt.hardware [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1501.431970] env[68285]: DEBUG nova.virt.hardware [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1501.432168] env[68285]: DEBUG nova.virt.hardware [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1501.432350] env[68285]: DEBUG nova.virt.hardware [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c 
tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1501.432564] env[68285]: DEBUG nova.virt.hardware [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1501.432721] env[68285]: DEBUG nova.virt.hardware [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1501.432885] env[68285]: DEBUG nova.virt.hardware [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1501.433063] env[68285]: DEBUG nova.virt.hardware [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1501.433244] env[68285]: DEBUG nova.virt.hardware [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1501.434319] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6f6451-db44-41a3-b7f5-60af54a3daa0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.441652] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8963874-fac9-47f0-8d52-2de39827bd5e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.643775] env[68285]: DEBUG nova.compute.manager [req-1ec59fb3-8caf-461b-affd-2bc46edc28c1 req-e6087a0d-3824-4f47-903e-e2b1765256a5 service nova] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Received event network-vif-plugged-3c86daf5-dd08-47db-a3c9-f356b57b59c3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1501.643999] env[68285]: DEBUG oslo_concurrency.lockutils [req-1ec59fb3-8caf-461b-affd-2bc46edc28c1 req-e6087a0d-3824-4f47-903e-e2b1765256a5 service nova] Acquiring lock "7cda8bbd-a75f-4a6a-8905-3f387fcbd624-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1501.644243] env[68285]: DEBUG oslo_concurrency.lockutils [req-1ec59fb3-8caf-461b-affd-2bc46edc28c1 req-e6087a0d-3824-4f47-903e-e2b1765256a5 service nova] Lock "7cda8bbd-a75f-4a6a-8905-3f387fcbd624-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: 
waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1501.644457] env[68285]: DEBUG oslo_concurrency.lockutils [req-1ec59fb3-8caf-461b-affd-2bc46edc28c1 req-e6087a0d-3824-4f47-903e-e2b1765256a5 service nova] Lock "7cda8bbd-a75f-4a6a-8905-3f387fcbd624-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1501.644612] env[68285]: DEBUG nova.compute.manager [req-1ec59fb3-8caf-461b-affd-2bc46edc28c1 req-e6087a0d-3824-4f47-903e-e2b1765256a5 service nova] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] No waiting events found dispatching network-vif-plugged-3c86daf5-dd08-47db-a3c9-f356b57b59c3 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1501.644774] env[68285]: WARNING nova.compute.manager [req-1ec59fb3-8caf-461b-affd-2bc46edc28c1 req-e6087a0d-3824-4f47-903e-e2b1765256a5 service nova] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Received unexpected event network-vif-plugged-3c86daf5-dd08-47db-a3c9-f356b57b59c3 for instance with vm_state building and task_state spawning. [ 1501.761089] env[68285]: INFO nova.compute.manager [None req-9491fce7-6d82-49ef-a245-ece0c4bdd9ee tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance to original state: 'active' [ 1502.304960] env[68285]: DEBUG nova.network.neutron [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Successfully updated port: 3c86daf5-dd08-47db-a3c9-f356b57b59c3 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1502.325242] env[68285]: DEBUG nova.compute.manager [req-76650c5d-1a51-45f9-8daf-949a600783d8 req-107a422c-bca7-495b-a1f4-0795b6274010 service nova] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Received event network-changed-3c86daf5-dd08-47db-a3c9-f356b57b59c3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1502.325445] env[68285]: DEBUG nova.compute.manager [req-76650c5d-1a51-45f9-8daf-949a600783d8 req-107a422c-bca7-495b-a1f4-0795b6274010 service nova] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Refreshing instance network info cache due to event network-changed-3c86daf5-dd08-47db-a3c9-f356b57b59c3. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1502.325666] env[68285]: DEBUG oslo_concurrency.lockutils [req-76650c5d-1a51-45f9-8daf-949a600783d8 req-107a422c-bca7-495b-a1f4-0795b6274010 service nova] Acquiring lock "refresh_cache-7cda8bbd-a75f-4a6a-8905-3f387fcbd624" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.325784] env[68285]: DEBUG oslo_concurrency.lockutils [req-76650c5d-1a51-45f9-8daf-949a600783d8 req-107a422c-bca7-495b-a1f4-0795b6274010 service nova] Acquired lock "refresh_cache-7cda8bbd-a75f-4a6a-8905-3f387fcbd624" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1502.325942] env[68285]: DEBUG nova.network.neutron [req-76650c5d-1a51-45f9-8daf-949a600783d8 req-107a422c-bca7-495b-a1f4-0795b6274010 service nova] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Refreshing network info cache for port 3c86daf5-dd08-47db-a3c9-f356b57b59c3 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1502.807209] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Acquiring lock "refresh_cache-7cda8bbd-a75f-4a6a-8905-3f387fcbd624" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.855907] env[68285]: DEBUG nova.network.neutron [req-76650c5d-1a51-45f9-8daf-949a600783d8 req-107a422c-bca7-495b-a1f4-0795b6274010 service nova] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1502.891623] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1502.891864] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1502.892093] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1502.892308] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1502.892488] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1502.894495] env[68285]: INFO nova.compute.manager [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Terminating instance [ 1502.926288] env[68285]: DEBUG nova.network.neutron [req-76650c5d-1a51-45f9-8daf-949a600783d8 req-107a422c-bca7-495b-a1f4-0795b6274010 service nova] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.400518] env[68285]: DEBUG nova.compute.manager [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1503.400788] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1503.401730] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a131625-e474-4e2f-b144-42670154623a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.409577] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1503.409822] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-029292aa-55c5-4843-91f6-e9c168161aed {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.416977] env[68285]: DEBUG oslo_vmware.api [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1503.416977] env[68285]: value = "task-2892784" [ 1503.416977] env[68285]: _type = "Task" [ 1503.416977] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.426150] env[68285]: DEBUG oslo_vmware.api [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892784, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.428670] env[68285]: DEBUG oslo_concurrency.lockutils [req-76650c5d-1a51-45f9-8daf-949a600783d8 req-107a422c-bca7-495b-a1f4-0795b6274010 service nova] Releasing lock "refresh_cache-7cda8bbd-a75f-4a6a-8905-3f387fcbd624" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1503.428986] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Acquired lock "refresh_cache-7cda8bbd-a75f-4a6a-8905-3f387fcbd624" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1503.429155] env[68285]: DEBUG nova.network.neutron [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1503.927612] env[68285]: DEBUG oslo_vmware.api [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892784, 'name': PowerOffVM_Task, 'duration_secs': 0.231207} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.928023] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1503.928023] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1503.928276] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8c2e63b-2ef4-40b3-b412-1e3a122648b4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.959670] env[68285]: DEBUG nova.network.neutron [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1504.083208] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1504.083425] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1504.083604] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleting the datastore file [datastore2] a8a67f90-047d-49ce-8de0-ee3e17998c6b {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1504.083862] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56404426-6c79-44a8-933c-ff40fb366a85 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.087847] env[68285]: DEBUG nova.network.neutron [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Updating instance_info_cache with network_info: [{"id": "3c86daf5-dd08-47db-a3c9-f356b57b59c3", "address": "fa:16:3e:cd:e0:49", "network": {"id": "7f9e4a66-c7c0-479f-8715-f9d27e55e28b", "bridge": "br-int", "label": "tempest-ServersTestJSON-972822710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "632eb76477654f03aa243d38db04a430", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c86daf5-dd", "ovs_interfaceid": "3c86daf5-dd08-47db-a3c9-f356b57b59c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.089954] env[68285]: DEBUG oslo_vmware.api [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1504.089954] env[68285]: value = "task-2892786" [ 1504.089954] env[68285]: _type = "Task" [ 1504.089954] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.097509] env[68285]: DEBUG oslo_vmware.api [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892786, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.591630] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Releasing lock "refresh_cache-7cda8bbd-a75f-4a6a-8905-3f387fcbd624" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1504.591977] env[68285]: DEBUG nova.compute.manager [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Instance network_info: |[{"id": "3c86daf5-dd08-47db-a3c9-f356b57b59c3", "address": "fa:16:3e:cd:e0:49", "network": {"id": "7f9e4a66-c7c0-479f-8715-f9d27e55e28b", "bridge": "br-int", "label": "tempest-ServersTestJSON-972822710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "632eb76477654f03aa243d38db04a430", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c86daf5-dd", "ovs_interfaceid": "3c86daf5-dd08-47db-a3c9-f356b57b59c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1504.592510] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:e0:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '298bb8ef-4765-494c-b157-7a349218bd1e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c86daf5-dd08-47db-a3c9-f356b57b59c3', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1504.599891] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Creating folder: Project (632eb76477654f03aa243d38db04a430). Parent ref: group-v580775. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1504.603024] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06868fd2-6204-47ac-a155-fc943b45c3b4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.609560] env[68285]: DEBUG oslo_vmware.api [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892786, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174593} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.609780] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1504.609953] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1504.610140] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1504.610310] env[68285]: INFO nova.compute.manager [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1504.610540] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1504.610731] env[68285]: DEBUG nova.compute.manager [-] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1504.610862] env[68285]: DEBUG nova.network.neutron [-] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1504.613595] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Created folder: Project (632eb76477654f03aa243d38db04a430) in parent group-v580775. [ 1504.613772] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Creating folder: Instances. Parent ref: group-v581117. 
{{(pid=68285) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1504.614285] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef8ae5b6-5901-4564-bcbb-58b409f5093c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.621525] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Created folder: Instances in parent group-v581117. [ 1504.621739] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1504.621913] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1504.622113] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4e04645-f03e-4d69-bab2-3f586c0056ea {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.641071] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1504.641071] env[68285]: value = "task-2892789" [ 1504.641071] env[68285]: _type = "Task" [ 1504.641071] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.647896] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892789, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.069401] env[68285]: DEBUG nova.compute.manager [req-42d0ba27-b67e-48b4-adff-61a324be6005 req-7bd48d45-d5e7-4d10-bed3-4f543de17dfc service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Received event network-vif-deleted-7c7b1a5b-57fa-4ae1-a454-3256e454042e {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1505.069659] env[68285]: INFO nova.compute.manager [req-42d0ba27-b67e-48b4-adff-61a324be6005 req-7bd48d45-d5e7-4d10-bed3-4f543de17dfc service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Neutron deleted interface 7c7b1a5b-57fa-4ae1-a454-3256e454042e; detaching it from the instance and deleting it from the info cache [ 1505.069911] env[68285]: DEBUG nova.network.neutron [req-42d0ba27-b67e-48b4-adff-61a324be6005 req-7bd48d45-d5e7-4d10-bed3-4f543de17dfc service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.151522] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892789, 'name': CreateVM_Task, 'duration_secs': 0.357542} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.151696] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1505.152478] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.152623] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1505.152936] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1505.153204] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fe918b8-16f3-4df4-acf3-b0707e976f08 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.157506] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Waiting for the task: (returnval){ [ 1505.157506] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d934f1-f7b3-ce3d-9332-a1dbd4a494fc" [ 1505.157506] env[68285]: _type = "Task" [ 1505.157506] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.165090] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d934f1-f7b3-ce3d-9332-a1dbd4a494fc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.546768] env[68285]: DEBUG nova.network.neutron [-] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.572545] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2f81baf-6070-4a1d-a32e-8c655746ac69 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.582236] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29566e6c-b1e7-43e9-b4a8-01803e368a57 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.606933] env[68285]: DEBUG nova.compute.manager [req-42d0ba27-b67e-48b4-adff-61a324be6005 req-7bd48d45-d5e7-4d10-bed3-4f543de17dfc service nova] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Detach interface failed, port_id=7c7b1a5b-57fa-4ae1-a454-3256e454042e, reason: Instance a8a67f90-047d-49ce-8de0-ee3e17998c6b could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1505.668613] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d934f1-f7b3-ce3d-9332-a1dbd4a494fc, 'name': SearchDatastore_Task, 'duration_secs': 0.011705} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.668886] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1505.669129] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1505.669362] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.669664] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1505.669664] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c 
tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1505.669918] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89747673-f6ca-4462-8a6d-bc9a48b34017 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.678867] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1505.679177] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1505.679737] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16058c78-b667-49ce-b98f-3686a9cf71ec {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.684705] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Waiting for the task: (returnval){ [ 1505.684705] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52bb2cd6-9bce-5e89-613c-d9e5f70da7f2" [ 1505.684705] env[68285]: _type = "Task" [ 1505.684705] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.692342] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bb2cd6-9bce-5e89-613c-d9e5f70da7f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.049868] env[68285]: INFO nova.compute.manager [-] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Took 1.44 seconds to deallocate network for instance. [ 1506.195380] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52bb2cd6-9bce-5e89-613c-d9e5f70da7f2, 'name': SearchDatastore_Task, 'duration_secs': 0.012429} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.196121] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7d5c8c3-6c29-419a-8492-fd2fbf2d6d1a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.201537] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Waiting for the task: (returnval){ [ 1506.201537] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5205fa6a-b028-c462-296f-26cbc92b2867" [ 1506.201537] env[68285]: _type = "Task" [ 1506.201537] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.208683] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5205fa6a-b028-c462-296f-26cbc92b2867, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.556943] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1506.556943] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1506.556943] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1506.583437] env[68285]: INFO nova.scheduler.client.report [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleted allocations for instance a8a67f90-047d-49ce-8de0-ee3e17998c6b [ 1506.712468] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5205fa6a-b028-c462-296f-26cbc92b2867, 'name': SearchDatastore_Task, 'duration_secs': 0.009669} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.712728] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1506.712977] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 7cda8bbd-a75f-4a6a-8905-3f387fcbd624/7cda8bbd-a75f-4a6a-8905-3f387fcbd624.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1506.713240] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ecfcbbc-a7b1-4028-ac61-bc0c9362bf7d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.719318] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Waiting for the task: (returnval){ [ 1506.719318] env[68285]: value = "task-2892790" [ 1506.719318] env[68285]: _type = "Task" [ 1506.719318] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.726686] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': task-2892790, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.092033] env[68285]: DEBUG oslo_concurrency.lockutils [None req-b98cb06f-e5bb-44f0-8f94-04c9a706d371 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "a8a67f90-047d-49ce-8de0-ee3e17998c6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.199s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1507.229145] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': task-2892790, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487026} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.229480] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 7cda8bbd-a75f-4a6a-8905-3f387fcbd624/7cda8bbd-a75f-4a6a-8905-3f387fcbd624.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1507.229695] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1507.229946] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87ba6e5a-a2ed-429e-aace-27fa7a67e36e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.235863] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Waiting for the task: (returnval){ [ 1507.235863] env[68285]: value = "task-2892791" [ 1507.235863] env[68285]: _type = "Task" [ 1507.235863] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.244079] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': task-2892791, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.746264] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': task-2892791, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066755} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.746547] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1507.747308] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac80c1b-9f05-45cc-8517-a04bfd55eee1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.768382] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] 7cda8bbd-a75f-4a6a-8905-3f387fcbd624/7cda8bbd-a75f-4a6a-8905-3f387fcbd624.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1507.768634] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fe4a8b6-41db-4124-93ef-a123c9640c3d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.789087] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Waiting for the task: (returnval){ [ 1507.789087] env[68285]: value = "task-2892792" [ 1507.789087] env[68285]: _type = "Task" [ 1507.789087] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.796278] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': task-2892792, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.834131] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "9583f10c-00be-4712-8018-04d642c9c597" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1507.834411] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "9583f10c-00be-4712-8018-04d642c9c597" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1508.300094] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': task-2892792, 'name': ReconfigVM_Task, 'duration_secs': 0.282262} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.300396] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Reconfigured VM instance instance-0000007e to attach disk [datastore2] 7cda8bbd-a75f-4a6a-8905-3f387fcbd624/7cda8bbd-a75f-4a6a-8905-3f387fcbd624.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1508.300954] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11aab63a-b26e-4b42-a062-8fadca0ed831 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.307045] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Waiting for the task: (returnval){ [ 1508.307045] env[68285]: value = "task-2892793" [ 1508.307045] env[68285]: _type = "Task" [ 1508.307045] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.313869] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': task-2892793, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.336495] env[68285]: DEBUG nova.compute.manager [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Starting instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1508.816340] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': task-2892793, 'name': Rename_Task, 'duration_secs': 0.135567} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.816632] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1508.816871] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f33d27e0-24c5-4f62-a07c-73cdfd32360d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.822893] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Waiting for the task: (returnval){ [ 1508.822893] env[68285]: value = "task-2892794" [ 1508.822893] env[68285]: _type = "Task" [ 1508.822893] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.830241] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': task-2892794, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.859189] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1508.859680] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1508.861566] env[68285]: INFO nova.compute.claims [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1509.333337] env[68285]: DEBUG oslo_vmware.api [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': task-2892794, 'name': PowerOnVM_Task, 'duration_secs': 0.44802} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.333713] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1509.333762] env[68285]: INFO nova.compute.manager [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Took 7.93 seconds to spawn the instance on the hypervisor. [ 1509.333920] env[68285]: DEBUG nova.compute.manager [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1509.334723] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013abbb0-c6af-4a4f-a404-6bbc0a3913c0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.852974] env[68285]: INFO nova.compute.manager [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Took 12.61 seconds to build instance. 
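The spawn sequence above (SearchDatastore_Task, MakeDirectory, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven by oslo.vmware's task handling: wait_for_task (api.py:397) submits the request and _poll_task (api.py:434/444) re-reads the task state until it reaches success, which is what produces the alternating "progress is N%" and "completed successfully" entries. A minimal sketch of that polling pattern, assuming a generic get_task_info callable; the names below are illustrative only and are not oslo.vmware's actual API:

import logging
import time

LOG = logging.getLogger(__name__)


class VMwareTaskError(Exception):
    """Raised when a vCenter task ends in the 'error' state or times out."""


def poll_task(get_task_info, task_id, interval=0.5, timeout=300.0):
    """Poll a vCenter task until it succeeds, fails, or times out.

    get_task_info(task_id) is assumed to return an object with .state
    ('queued', 'running', 'success' or 'error'), .progress and .error.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info.state in ('queued', 'running'):
            # Mirrors the "Task: {...} progress is N%." lines in this log.
            LOG.debug("Task: %s progress is %s%%.", task_id, info.progress or 0)
        elif info.state == 'success':
            # Mirrors the "completed successfully" lines in this log.
            LOG.debug("Task: %s completed successfully.", task_id)
            return info
        else:
            raise VMwareTaskError("Task %s failed: %s" % (task_id, info.error))
        time.sleep(interval)
    raise VMwareTaskError("Task %s timed out after %ss" % (task_id, timeout))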
[ 1509.925458] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d07d414-0789-4525-86fe-45c1c00b6cd3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.933494] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9332f7-5191-466d-8002-aad8de81fe3c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.972451] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae711791-1e43-4e5f-86e8-ce6de2e3644a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.979905] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c667068-1333-4bac-985b-465537e22f3d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.993120] env[68285]: DEBUG nova.compute.provider_tree [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1510.354767] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c2962f12-87c4-4410-ac2e-d9fc7911536c tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Lock "7cda8bbd-a75f-4a6a-8905-3f387fcbd624" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.121s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1510.380667] env[68285]: DEBUG nova.compute.manager [req-b4e16273-1dc5-4849-a372-3497cf9f7456 req-11bec98d-27b8-4b7c-93b7-980ea78bb3a7 service nova] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Received event network-changed-3c86daf5-dd08-47db-a3c9-f356b57b59c3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1510.380739] env[68285]: DEBUG nova.compute.manager [req-b4e16273-1dc5-4849-a372-3497cf9f7456 req-11bec98d-27b8-4b7c-93b7-980ea78bb3a7 service nova] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Refreshing instance network info cache due to event network-changed-3c86daf5-dd08-47db-a3c9-f356b57b59c3. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1510.380924] env[68285]: DEBUG oslo_concurrency.lockutils [req-b4e16273-1dc5-4849-a372-3497cf9f7456 req-11bec98d-27b8-4b7c-93b7-980ea78bb3a7 service nova] Acquiring lock "refresh_cache-7cda8bbd-a75f-4a6a-8905-3f387fcbd624" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.381080] env[68285]: DEBUG oslo_concurrency.lockutils [req-b4e16273-1dc5-4849-a372-3497cf9f7456 req-11bec98d-27b8-4b7c-93b7-980ea78bb3a7 service nova] Acquired lock "refresh_cache-7cda8bbd-a75f-4a6a-8905-3f387fcbd624" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1510.381293] env[68285]: DEBUG nova.network.neutron [req-b4e16273-1dc5-4849-a372-3497cf9f7456 req-11bec98d-27b8-4b7c-93b7-980ea78bb3a7 service nova] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Refreshing network info cache for port 3c86daf5-dd08-47db-a3c9-f356b57b59c3 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1510.496309] env[68285]: DEBUG nova.scheduler.client.report [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1511.000709] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.141s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1511.001214] env[68285]: DEBUG nova.compute.manager [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Start building networks asynchronously for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1511.080509] env[68285]: DEBUG nova.network.neutron [req-b4e16273-1dc5-4849-a372-3497cf9f7456 req-11bec98d-27b8-4b7c-93b7-980ea78bb3a7 service nova] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Updated VIF entry in instance network info cache for port 3c86daf5-dd08-47db-a3c9-f356b57b59c3. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1511.080943] env[68285]: DEBUG nova.network.neutron [req-b4e16273-1dc5-4849-a372-3497cf9f7456 req-11bec98d-27b8-4b7c-93b7-980ea78bb3a7 service nova] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Updating instance_info_cache with network_info: [{"id": "3c86daf5-dd08-47db-a3c9-f356b57b59c3", "address": "fa:16:3e:cd:e0:49", "network": {"id": "7f9e4a66-c7c0-479f-8715-f9d27e55e28b", "bridge": "br-int", "label": "tempest-ServersTestJSON-972822710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "632eb76477654f03aa243d38db04a430", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c86daf5-dd", "ovs_interfaceid": "3c86daf5-dd08-47db-a3c9-f356b57b59c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.506516] env[68285]: DEBUG nova.compute.utils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1511.507860] env[68285]: DEBUG nova.compute.manager [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Allocating IP information in the background. 
{{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1511.508050] env[68285]: DEBUG nova.network.neutron [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1511.542898] env[68285]: DEBUG nova.policy [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41e116b3ac9d4c7386847a5559ea313c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43491d0bdffc49eaaad084f3124cffcb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1511.583250] env[68285]: DEBUG oslo_concurrency.lockutils [req-b4e16273-1dc5-4849-a372-3497cf9f7456 req-11bec98d-27b8-4b7c-93b7-980ea78bb3a7 service nova] Releasing lock "refresh_cache-7cda8bbd-a75f-4a6a-8905-3f387fcbd624" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1511.790411] env[68285]: DEBUG nova.network.neutron [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Successfully created port: b34597a1-042a-4358-9952-2daf4a1a35bb {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1512.011556] env[68285]: DEBUG nova.compute.manager [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1513.021101] env[68285]: DEBUG nova.compute.manager [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1513.047390] env[68285]: DEBUG nova.virt.hardware [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1513.047652] env[68285]: DEBUG nova.virt.hardware [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1513.047824] env[68285]: DEBUG nova.virt.hardware [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1513.048021] env[68285]: DEBUG nova.virt.hardware [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1513.048169] env[68285]: DEBUG nova.virt.hardware [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1513.048368] env[68285]: DEBUG nova.virt.hardware [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1513.048520] env[68285]: DEBUG nova.virt.hardware [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1513.048675] env[68285]: DEBUG nova.virt.hardware [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1513.048877] env[68285]: DEBUG nova.virt.hardware [None 
req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1513.049058] env[68285]: DEBUG nova.virt.hardware [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1513.049235] env[68285]: DEBUG nova.virt.hardware [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1513.050121] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ac4b44-1dbb-462a-9e03-c010b8addb9a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.059625] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a30ffc-ce6e-4333-84b3-1ab26fe6de39 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.155133] env[68285]: DEBUG nova.compute.manager [req-4a0d336c-1a3f-4aaa-92b6-0769dfde9fa5 req-c92784d7-6b51-4e7e-af59-24a03df9a908 service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Received event network-vif-plugged-b34597a1-042a-4358-9952-2daf4a1a35bb {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1513.155396] env[68285]: DEBUG oslo_concurrency.lockutils [req-4a0d336c-1a3f-4aaa-92b6-0769dfde9fa5 req-c92784d7-6b51-4e7e-af59-24a03df9a908 service nova] Acquiring lock "9583f10c-00be-4712-8018-04d642c9c597-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1513.155595] env[68285]: DEBUG oslo_concurrency.lockutils [req-4a0d336c-1a3f-4aaa-92b6-0769dfde9fa5 req-c92784d7-6b51-4e7e-af59-24a03df9a908 service nova] Lock "9583f10c-00be-4712-8018-04d642c9c597-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1513.155841] env[68285]: DEBUG oslo_concurrency.lockutils [req-4a0d336c-1a3f-4aaa-92b6-0769dfde9fa5 req-c92784d7-6b51-4e7e-af59-24a03df9a908 service nova] Lock "9583f10c-00be-4712-8018-04d642c9c597-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1513.155926] env[68285]: DEBUG nova.compute.manager [req-4a0d336c-1a3f-4aaa-92b6-0769dfde9fa5 req-c92784d7-6b51-4e7e-af59-24a03df9a908 service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] No waiting events found dispatching network-vif-plugged-b34597a1-042a-4358-9952-2daf4a1a35bb {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1513.156114] env[68285]: WARNING nova.compute.manager [req-4a0d336c-1a3f-4aaa-92b6-0769dfde9fa5 
req-c92784d7-6b51-4e7e-af59-24a03df9a908 service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Received unexpected event network-vif-plugged-b34597a1-042a-4358-9952-2daf4a1a35bb for instance with vm_state building and task_state spawning. [ 1513.245240] env[68285]: DEBUG nova.network.neutron [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Successfully updated port: b34597a1-042a-4358-9952-2daf4a1a35bb {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1513.748775] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1513.749015] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1513.749108] env[68285]: DEBUG nova.network.neutron [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1514.279994] env[68285]: DEBUG nova.network.neutron [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Instance cache missing network info. 
{{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1514.399800] env[68285]: DEBUG nova.network.neutron [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Updating instance_info_cache with network_info: [{"id": "b34597a1-042a-4358-9952-2daf4a1a35bb", "address": "fa:16:3e:a8:55:10", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb34597a1-04", "ovs_interfaceid": "b34597a1-042a-4358-9952-2daf4a1a35bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1514.903420] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1514.903728] env[68285]: DEBUG nova.compute.manager [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Instance network_info: |[{"id": "b34597a1-042a-4358-9952-2daf4a1a35bb", "address": "fa:16:3e:a8:55:10", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb34597a1-04", "ovs_interfaceid": "b34597a1-042a-4358-9952-2daf4a1a35bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1514.904182] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:55:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb0e556a-0f69-4a5c-af62-ffc46edb8e63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b34597a1-042a-4358-9952-2daf4a1a35bb', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1514.911552] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1514.911764] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1514.911988] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a59fcb38-918f-4246-b513-fe57821fb032 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.932919] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1514.932919] env[68285]: value = "task-2892795" [ 1514.932919] env[68285]: _type = "Task" [ 1514.932919] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.940443] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892795, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.182178] env[68285]: DEBUG nova.compute.manager [req-b56ad5e0-a4b9-416e-b0e9-971d0d6abc48 req-fa8916cd-5d9e-4773-9ca0-6807e689561d service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Received event network-changed-b34597a1-042a-4358-9952-2daf4a1a35bb {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1515.182458] env[68285]: DEBUG nova.compute.manager [req-b56ad5e0-a4b9-416e-b0e9-971d0d6abc48 req-fa8916cd-5d9e-4773-9ca0-6807e689561d service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Refreshing instance network info cache due to event network-changed-b34597a1-042a-4358-9952-2daf4a1a35bb. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1515.182703] env[68285]: DEBUG oslo_concurrency.lockutils [req-b56ad5e0-a4b9-416e-b0e9-971d0d6abc48 req-fa8916cd-5d9e-4773-9ca0-6807e689561d service nova] Acquiring lock "refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.182905] env[68285]: DEBUG oslo_concurrency.lockutils [req-b56ad5e0-a4b9-416e-b0e9-971d0d6abc48 req-fa8916cd-5d9e-4773-9ca0-6807e689561d service nova] Acquired lock "refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1515.183149] env[68285]: DEBUG nova.network.neutron [req-b56ad5e0-a4b9-416e-b0e9-971d0d6abc48 req-fa8916cd-5d9e-4773-9ca0-6807e689561d service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Refreshing network info cache for port b34597a1-042a-4358-9952-2daf4a1a35bb {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1515.442976] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892795, 'name': CreateVM_Task, 'duration_secs': 0.3104} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.443337] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1515.443811] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.443974] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1515.444333] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1515.444899] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0783f2a8-bcd8-4809-9fa9-599b65c24fb9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.449344] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1515.449344] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d4127d-11dd-57f0-666f-b84f859cf2ce" [ 1515.449344] env[68285]: _type = "Task" [ 1515.449344] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.456767] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d4127d-11dd-57f0-666f-b84f859cf2ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.856649] env[68285]: DEBUG nova.network.neutron [req-b56ad5e0-a4b9-416e-b0e9-971d0d6abc48 req-fa8916cd-5d9e-4773-9ca0-6807e689561d service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Updated VIF entry in instance network info cache for port b34597a1-042a-4358-9952-2daf4a1a35bb. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1515.856997] env[68285]: DEBUG nova.network.neutron [req-b56ad5e0-a4b9-416e-b0e9-971d0d6abc48 req-fa8916cd-5d9e-4773-9ca0-6807e689561d service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Updating instance_info_cache with network_info: [{"id": "b34597a1-042a-4358-9952-2daf4a1a35bb", "address": "fa:16:3e:a8:55:10", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb34597a1-04", "ovs_interfaceid": "b34597a1-042a-4358-9952-2daf4a1a35bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.959658] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52d4127d-11dd-57f0-666f-b84f859cf2ce, 'name': SearchDatastore_Task, 'duration_secs': 0.010061} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.959950] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1515.960195] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1515.960422] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.960566] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1515.960740] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1515.960988] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2594a29-9dd2-469a-a116-d52f625f3b62 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.969072] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1515.969213] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1515.969859] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aaa2902a-5e27-4936-9c24-fbf9bd236298 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.976748] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1515.976748] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b7946e-37fb-e55f-1034-19968d951997" [ 1515.976748] env[68285]: _type = "Task" [ 1515.976748] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.983867] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b7946e-37fb-e55f-1034-19968d951997, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.359624] env[68285]: DEBUG oslo_concurrency.lockutils [req-b56ad5e0-a4b9-416e-b0e9-971d0d6abc48 req-fa8916cd-5d9e-4773-9ca0-6807e689561d service nova] Releasing lock "refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1516.487188] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b7946e-37fb-e55f-1034-19968d951997, 'name': SearchDatastore_Task, 'duration_secs': 0.010582} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.487927] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dd96a88-ae52-49ac-b17d-727bc7800ad3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.492700] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1516.492700] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52e4eb0a-2dee-cf4a-6743-889775415603" [ 1516.492700] env[68285]: _type = "Task" [ 1516.492700] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.499807] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e4eb0a-2dee-cf4a-6743-889775415603, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.004042] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52e4eb0a-2dee-cf4a-6743-889775415603, 'name': SearchDatastore_Task, 'duration_secs': 0.009955} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.004042] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1517.004042] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 9583f10c-00be-4712-8018-04d642c9c597/9583f10c-00be-4712-8018-04d642c9c597.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1517.004042] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42806bd3-9faa-43af-83be-a509ee4babd3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.010834] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1517.010834] env[68285]: value = "task-2892796" [ 1517.010834] env[68285]: _type = "Task" [ 1517.010834] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.018273] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892796, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.519977] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892796, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460472} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.520365] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore2] 9583f10c-00be-4712-8018-04d642c9c597/9583f10c-00be-4712-8018-04d642c9c597.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1517.520460] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1517.520709] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-96318cac-f6e0-4a84-b0ab-292f71422bf8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.527221] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1517.527221] env[68285]: value = "task-2892797" [ 1517.527221] env[68285]: _type = "Task" [ 1517.527221] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.534745] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892797, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.036829] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892797, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0699} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.037125] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1518.037914] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac49bdac-7f79-4b1c-9e18-bafeaa23a742 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.059178] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] 9583f10c-00be-4712-8018-04d642c9c597/9583f10c-00be-4712-8018-04d642c9c597.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1518.059464] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1610c87-100d-43ef-be59-aedc0f9c7e4c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.078725] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1518.078725] env[68285]: value = "task-2892798" [ 1518.078725] env[68285]: _type = "Task" [ 1518.078725] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.111965] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892798, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.589303] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892798, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.090160] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892798, 'name': ReconfigVM_Task, 'duration_secs': 0.689171} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.090452] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Reconfigured VM instance instance-0000007f to attach disk [datastore2] 9583f10c-00be-4712-8018-04d642c9c597/9583f10c-00be-4712-8018-04d642c9c597.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1519.091091] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a640f0e-c061-45aa-907e-7caa0d0f2418 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.097730] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1519.097730] env[68285]: value = "task-2892799" [ 1519.097730] env[68285]: _type = "Task" [ 1519.097730] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.107814] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892799, 'name': Rename_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.607266] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892799, 'name': Rename_Task, 'duration_secs': 0.1863} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.607646] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1519.607774] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8365b3a1-e207-48eb-a8e6-afd088064e0d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.613439] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1519.613439] env[68285]: value = "task-2892800" [ 1519.613439] env[68285]: _type = "Task" [ 1519.613439] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.620313] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892800, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.122516] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892800, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.624671] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892800, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.123409] env[68285]: DEBUG oslo_vmware.api [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892800, 'name': PowerOnVM_Task, 'duration_secs': 1.028916} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.123665] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1521.123863] env[68285]: INFO nova.compute.manager [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Took 8.10 seconds to spawn the instance on the hypervisor. [ 1521.124055] env[68285]: DEBUG nova.compute.manager [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1521.124814] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07ed72f-7646-4cfc-b5e9-1db79808011d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.643257] env[68285]: INFO nova.compute.manager [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Took 12.80 seconds to build instance. [ 1521.896184] env[68285]: DEBUG nova.compute.manager [req-d3cd5d32-00f5-4952-8379-cdea1e965c5a req-32afde77-5c3a-444a-93b2-99d1497063d2 service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Received event network-changed-b34597a1-042a-4358-9952-2daf4a1a35bb {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1521.896384] env[68285]: DEBUG nova.compute.manager [req-d3cd5d32-00f5-4952-8379-cdea1e965c5a req-32afde77-5c3a-444a-93b2-99d1497063d2 service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Refreshing instance network info cache due to event network-changed-b34597a1-042a-4358-9952-2daf4a1a35bb. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1521.896595] env[68285]: DEBUG oslo_concurrency.lockutils [req-d3cd5d32-00f5-4952-8379-cdea1e965c5a req-32afde77-5c3a-444a-93b2-99d1497063d2 service nova] Acquiring lock "refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.896737] env[68285]: DEBUG oslo_concurrency.lockutils [req-d3cd5d32-00f5-4952-8379-cdea1e965c5a req-32afde77-5c3a-444a-93b2-99d1497063d2 service nova] Acquired lock "refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1521.896894] env[68285]: DEBUG nova.network.neutron [req-d3cd5d32-00f5-4952-8379-cdea1e965c5a req-32afde77-5c3a-444a-93b2-99d1497063d2 service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Refreshing network info cache for port b34597a1-042a-4358-9952-2daf4a1a35bb {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1522.144986] env[68285]: DEBUG oslo_concurrency.lockutils [None req-26710302-7d2f-42ae-a0bd-9596eb482060 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "9583f10c-00be-4712-8018-04d642c9c597" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.310s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1522.602408] env[68285]: DEBUG nova.network.neutron [req-d3cd5d32-00f5-4952-8379-cdea1e965c5a req-32afde77-5c3a-444a-93b2-99d1497063d2 service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Updated VIF entry in instance network info cache for port b34597a1-042a-4358-9952-2daf4a1a35bb. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1522.602758] env[68285]: DEBUG nova.network.neutron [req-d3cd5d32-00f5-4952-8379-cdea1e965c5a req-32afde77-5c3a-444a-93b2-99d1497063d2 service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Updating instance_info_cache with network_info: [{"id": "b34597a1-042a-4358-9952-2daf4a1a35bb", "address": "fa:16:3e:a8:55:10", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb34597a1-04", "ovs_interfaceid": "b34597a1-042a-4358-9952-2daf4a1a35bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.105584] env[68285]: DEBUG oslo_concurrency.lockutils [req-d3cd5d32-00f5-4952-8379-cdea1e965c5a req-32afde77-5c3a-444a-93b2-99d1497063d2 service nova] Releasing lock "refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1523.654999] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1523.655282] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1524.158175] env[68285]: INFO nova.compute.manager [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Detaching volume 158231db-55db-48b6-a04f-63dcb2cdac56 [ 1524.187442] env[68285]: INFO nova.virt.block_device [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Attempting to driver detach volume 
158231db-55db-48b6-a04f-63dcb2cdac56 from mountpoint /dev/sdb [ 1524.187688] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Volume detach. Driver type: vmdk {{(pid=68285) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1524.187872] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581114', 'volume_id': '158231db-55db-48b6-a04f-63dcb2cdac56', 'name': 'volume-158231db-55db-48b6-a04f-63dcb2cdac56', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'f9d35416-1f7f-4bf5-baba-1ce4e7436341', 'attached_at': '', 'detached_at': '', 'volume_id': '158231db-55db-48b6-a04f-63dcb2cdac56', 'serial': '158231db-55db-48b6-a04f-63dcb2cdac56'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1524.188761] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1cb1cf-5652-4863-a917-c7893ae7644f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.210695] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0587d8e4-0a9c-4904-814f-f6b0d3dc40d9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.217106] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6295bb-8dc2-4170-a153-c5e2dcd447ab {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.245197] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5e9820-3e2e-4e70-8531-64b6bd498b22 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.265885] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] The volume has not been displaced from its original location: [datastore1] volume-158231db-55db-48b6-a04f-63dcb2cdac56/volume-158231db-55db-48b6-a04f-63dcb2cdac56.vmdk. No consolidation needed. 
{{(pid=68285) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1524.274346] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Reconfiguring VM instance instance-00000079 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1524.274673] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ca3a38c-57d0-4929-bf6e-6ae93d76663c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.297414] env[68285]: DEBUG oslo_vmware.api [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1524.297414] env[68285]: value = "task-2892801" [ 1524.297414] env[68285]: _type = "Task" [ 1524.297414] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.304627] env[68285]: DEBUG oslo_vmware.api [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892801, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.806709] env[68285]: DEBUG oslo_vmware.api [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892801, 'name': ReconfigVM_Task, 'duration_secs': 0.219036} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.807013] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Reconfigured VM instance instance-00000079 to detach disk 2001 {{(pid=68285) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1524.811534] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02afcb2f-de33-4164-a961-7cca1cbf7c0f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.827148] env[68285]: DEBUG oslo_vmware.api [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1524.827148] env[68285]: value = "task-2892802" [ 1524.827148] env[68285]: _type = "Task" [ 1524.827148] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.834149] env[68285]: DEBUG oslo_vmware.api [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892802, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.337407] env[68285]: DEBUG oslo_vmware.api [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892802, 'name': ReconfigVM_Task, 'duration_secs': 0.136231} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.337712] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581114', 'volume_id': '158231db-55db-48b6-a04f-63dcb2cdac56', 'name': 'volume-158231db-55db-48b6-a04f-63dcb2cdac56', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'f9d35416-1f7f-4bf5-baba-1ce4e7436341', 'attached_at': '', 'detached_at': '', 'volume_id': '158231db-55db-48b6-a04f-63dcb2cdac56', 'serial': '158231db-55db-48b6-a04f-63dcb2cdac56'} {{(pid=68285) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1525.876166] env[68285]: DEBUG nova.objects.instance [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lazy-loading 'flavor' on Instance uuid f9d35416-1f7f-4bf5-baba-1ce4e7436341 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1526.883474] env[68285]: DEBUG oslo_concurrency.lockutils [None req-d906959f-e2bc-4462-b0ab-b036273a15b5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.228s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1527.894433] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1527.894866] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1527.894924] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1527.895126] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1527.895300] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1527.897414] env[68285]: INFO nova.compute.manager [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Terminating instance [ 1528.401309] env[68285]: DEBUG nova.compute.manager [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1528.401678] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1528.402579] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f7185e-6532-474f-9d2e-d37b9974212f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.411174] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1528.411384] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67a7bf85-6b53-4912-96cf-4c2e51e8be69 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.417114] env[68285]: DEBUG oslo_vmware.api [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1528.417114] env[68285]: value = "task-2892803" [ 1528.417114] env[68285]: _type = "Task" [ 1528.417114] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.425888] env[68285]: DEBUG oslo_vmware.api [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892803, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.926471] env[68285]: DEBUG oslo_vmware.api [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892803, 'name': PowerOffVM_Task, 'duration_secs': 0.178615} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.926814] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1528.926951] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1528.927209] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a6c7e29-eb61-41bb-8aad-bfd76ff17a85 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.994206] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1528.994382] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1528.994597] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleting the datastore file [datastore1] f9d35416-1f7f-4bf5-baba-1ce4e7436341 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1528.994873] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dac9cd25-133c-4f61-b97c-1cea1fe3b33c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.001481] env[68285]: DEBUG oslo_vmware.api [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1529.001481] 
env[68285]: value = "task-2892805" [ 1529.001481] env[68285]: _type = "Task" [ 1529.001481] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.009043] env[68285]: DEBUG oslo_vmware.api [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892805, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.510779] env[68285]: DEBUG oslo_vmware.api [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892805, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126555} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.511043] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1529.511233] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1529.511404] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1529.511574] env[68285]: INFO nova.compute.manager [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1529.511818] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1529.512013] env[68285]: DEBUG nova.compute.manager [-] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1529.512117] env[68285]: DEBUG nova.network.neutron [-] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1529.953624] env[68285]: DEBUG nova.compute.manager [req-065b2cfb-c8b7-459a-9dc1-4bc73a4a26a8 req-a2589019-79dd-43db-82a0-3c4085b0348f service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Received event network-vif-deleted-4cd7aff5-25ff-4491-b7b0-a079248d54f4 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1529.954032] env[68285]: INFO nova.compute.manager [req-065b2cfb-c8b7-459a-9dc1-4bc73a4a26a8 req-a2589019-79dd-43db-82a0-3c4085b0348f service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Neutron deleted interface 4cd7aff5-25ff-4491-b7b0-a079248d54f4; detaching it from the instance and deleting it from the info cache [ 1529.954032] env[68285]: DEBUG nova.network.neutron [req-065b2cfb-c8b7-459a-9dc1-4bc73a4a26a8 req-a2589019-79dd-43db-82a0-3c4085b0348f service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1530.432258] env[68285]: DEBUG nova.network.neutron [-] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1530.457085] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-116dc503-051a-4b6a-8535-134e5cdc09a3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.466731] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d092008-fd3b-4e10-b8dc-2ed51d820c58 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.492253] env[68285]: DEBUG nova.compute.manager [req-065b2cfb-c8b7-459a-9dc1-4bc73a4a26a8 req-a2589019-79dd-43db-82a0-3c4085b0348f service nova] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Detach interface failed, port_id=4cd7aff5-25ff-4491-b7b0-a079248d54f4, reason: Instance f9d35416-1f7f-4bf5-baba-1ce4e7436341 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1530.934842] env[68285]: INFO nova.compute.manager [-] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Took 1.42 seconds to deallocate network for instance. 
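The destroy sequence above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) follows oslo.vmware's invoke-and-poll pattern: a task-returning vSphere method is called through the API session, and wait_for_task() polls it, producing the "progress is N%" and "completed successfully" DEBUG lines seen throughout this log. The sketch below illustrates that pattern in isolation; the vCenter host, credentials, datastore path, and datacenter reference are placeholders, not values taken from this log.

```python
# Minimal sketch of the oslo.vmware invoke/wait pattern seen in the log above.
# Host, credentials, datastore path, and datacenter reference are placeholders.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vcenter.example.org',   # placeholder vCenter host
    'administrator',         # placeholder user name
    'secret',                # placeholder password
    10,                      # api_retry_count
    0.5)                     # task_poll_interval in seconds

# Invoke a task-returning vSphere API; the call goes through the session so
# that faults and session expiry are handled in one place.
file_manager = session.vim.service_content.fileManager
dc_ref = None  # placeholder: datacenter managed object reference
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore1] some-instance-uuid',  # placeholder datastore path
    datacenter=dc_ref)

# wait_for_task() polls the task (the "progress is N%" DEBUG lines above)
# and returns its TaskInfo once it completes, or raises on a task error.
task_info = session.wait_for_task(task)
print(task_info.state)
```

The "Waiting for function ... _deallocate_network_with_retries to return" entry reflects the same idea applied to a local callable rather than a vSphere task: the deallocation is retried via an oslo.service looping call until it finishes, after which the manager logs the total time taken.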
[ 1531.441105] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1531.441446] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1531.441626] env[68285]: DEBUG nova.objects.instance [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lazy-loading 'resources' on Instance uuid f9d35416-1f7f-4bf5-baba-1ce4e7436341 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1531.994366] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b062ffb-21e2-4cc9-91ca-b712dd5c12a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.001580] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a0a1b3-5a0b-4ec7-a527-55ba1d564e3a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.030990] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65eabd4e-e55b-4fc8-80fc-d0415da65c42 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.037557] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b97c25-eef9-4cd7-a29a-be7bccd8cc77 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.049977] env[68285]: DEBUG nova.compute.provider_tree [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1532.552771] env[68285]: DEBUG nova.scheduler.client.report [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1532.866225] env[68285]: DEBUG oslo_service.periodic_task [None 
req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1533.057642] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.616s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1533.075667] env[68285]: INFO nova.scheduler.client.report [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleted allocations for instance f9d35416-1f7f-4bf5-baba-1ce4e7436341 [ 1533.583207] env[68285]: DEBUG oslo_concurrency.lockutils [None req-5d6b9f26-7e30-443a-9dda-58813fb5f378 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "f9d35416-1f7f-4bf5-baba-1ce4e7436341" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.688s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1533.865936] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.866143] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1535.728454] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1535.728679] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1535.861331] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1536.230758] env[68285]: DEBUG nova.compute.manager [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Starting 
instance... {{(pid=68285) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1536.752923] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1536.753322] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1536.754895] env[68285]: INFO nova.compute.claims [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1536.861519] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1537.368482] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1537.368683] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1537.809049] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb801507-81f0-4f86-9464-b70d7c0798cd {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.815536] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090da3b4-fac0-48f8-a529-9efd609428da {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.845411] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b83524c-44ca-4cd6-a45b-3afacc1bd95b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.851954] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690c447c-6e42-44aa-9af7-9db0594ab63e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.864405] env[68285]: DEBUG nova.compute.provider_tree [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed in ProviderTree for provider: 
7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1537.870782] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1538.367895] env[68285]: DEBUG nova.scheduler.client.report [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1538.872730] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.119s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1538.873236] env[68285]: DEBUG nova.compute.manager [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Start building networks asynchronously for instance. 
{{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1538.875923] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.005s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1538.876140] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1538.876310] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68285) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1538.877332] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae508c54-a9cf-4963-827b-b65f6375c0f6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.885548] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e84f149-6b8f-4c80-9c68-66e1c7b2cb85 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.899145] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915db180-d85a-4379-a617-be5faa0d9c40 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.905908] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47674efd-c57c-40de-9b36-0911b7d7c66d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.935464] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180546MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=68285) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1538.935637] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1538.935855] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1539.378559] env[68285]: DEBUG nova.compute.utils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Using /dev/sd instead of None {{(pid=68285) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:239}} [ 1539.380315] env[68285]: DEBUG nova.compute.manager [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Allocating IP information in the background. {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1539.380487] env[68285]: DEBUG nova.network.neutron [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] allocate_for_instance() {{(pid=68285) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1539.437940] env[68285]: DEBUG nova.policy [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '692b7f9e18974b8c83f30adb9dbfe8a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c272180eed81480fabd7e6d4dacc2613', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68285) authorize /opt/stack/nova/nova/policy.py:192}} [ 1539.682212] env[68285]: DEBUG nova.network.neutron [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Successfully created port: d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1539.884238] env[68285]: DEBUG nova.compute.manager [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Start building block device mappings for instance. {{(pid=68285) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1539.958785] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 7cda8bbd-a75f-4a6a-8905-3f387fcbd624 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1539.958960] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 9583f10c-00be-4712-8018-04d642c9c597 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1539.959163] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1539.959349] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1539.959488] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1540.005222] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7158c2c6-b784-440f-a5b4-95f7210c4701 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.012563] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90d76c0-e949-44b7-80df-e057995ab673 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.042855] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084230a0-4e6a-408c-91cd-7cc21ade4905 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.050424] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b215b9-b708-4c43-a678-17ba0bb14874 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.063929] env[68285]: DEBUG nova.compute.provider_tree [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1540.566863] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1540.896405] env[68285]: DEBUG nova.compute.manager [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Start spawning the instance on the hypervisor. 
{{(pid=68285) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1540.922930] env[68285]: DEBUG nova.virt.hardware [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T15:48:17Z,direct_url=,disk_format='vmdk',id=ce84ab4c-9913-42dc-b839-714ad2184867,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='7499298836c74d9cb7f25c3b3f185ac3',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T15:48:18Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1540.923199] env[68285]: DEBUG nova.virt.hardware [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1540.923358] env[68285]: DEBUG nova.virt.hardware [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1540.923538] env[68285]: DEBUG nova.virt.hardware [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1540.923689] env[68285]: DEBUG nova.virt.hardware [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1540.923834] env[68285]: DEBUG nova.virt.hardware [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1540.924046] env[68285]: DEBUG nova.virt.hardware [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1540.924209] env[68285]: DEBUG nova.virt.hardware [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1540.924375] env[68285]: DEBUG nova.virt.hardware [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1540.924555] env[68285]: DEBUG nova.virt.hardware [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1540.924733] env[68285]: DEBUG nova.virt.hardware [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1540.925593] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff43cd4-7f35-4532-9db5-f9d27e3fcab9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.933301] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0da8c1-ed26-4291-885e-3c3a54fc8ad5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.037833] env[68285]: DEBUG nova.compute.manager [req-0d8fcf57-0ce6-4568-8b46-8710f6507398 req-981ebf67-3211-441a-a845-61159d23f525 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Received event network-vif-plugged-d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1541.038147] env[68285]: DEBUG oslo_concurrency.lockutils [req-0d8fcf57-0ce6-4568-8b46-8710f6507398 req-981ebf67-3211-441a-a845-61159d23f525 service nova] Acquiring lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1541.038286] env[68285]: DEBUG oslo_concurrency.lockutils [req-0d8fcf57-0ce6-4568-8b46-8710f6507398 req-981ebf67-3211-441a-a845-61159d23f525 service nova] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1541.038464] env[68285]: DEBUG oslo_concurrency.lockutils [req-0d8fcf57-0ce6-4568-8b46-8710f6507398 req-981ebf67-3211-441a-a845-61159d23f525 service nova] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1541.038636] env[68285]: DEBUG nova.compute.manager [req-0d8fcf57-0ce6-4568-8b46-8710f6507398 req-981ebf67-3211-441a-a845-61159d23f525 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] No waiting events found dispatching network-vif-plugged-d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1541.038778] env[68285]: WARNING nova.compute.manager [req-0d8fcf57-0ce6-4568-8b46-8710f6507398 req-981ebf67-3211-441a-a845-61159d23f525 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Received unexpected event network-vif-plugged-d3493390-fdd6-4fff-a513-fd627306eed0 for instance with vm_state building and task_state spawning. [ 1541.070903] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1541.071103] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.135s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1541.123228] env[68285]: DEBUG nova.network.neutron [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Successfully updated port: d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1541.568719] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1541.568959] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1541.569123] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68285) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1541.626151] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1541.626323] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1541.626503] env[68285]: DEBUG nova.network.neutron [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1542.157805] env[68285]: DEBUG nova.network.neutron [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Instance cache missing network info. {{(pid=68285) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1542.273082] env[68285]: DEBUG nova.network.neutron [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Updating instance_info_cache with network_info: [{"id": "d3493390-fdd6-4fff-a513-fd627306eed0", "address": "fa:16:3e:f6:da:11", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3493390-fd", "ovs_interfaceid": "d3493390-fdd6-4fff-a513-fd627306eed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1542.776095] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1542.776503] env[68285]: DEBUG nova.compute.manager [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Instance network_info: |[{"id": "d3493390-fdd6-4fff-a513-fd627306eed0", "address": "fa:16:3e:f6:da:11", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3493390-fd", "ovs_interfaceid": "d3493390-fdd6-4fff-a513-fd627306eed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68285) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1542.777093] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:da:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '82ca17df-257e-40e6-9ec9-310ed6f05ccb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3493390-fdd6-4fff-a513-fd627306eed0', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1542.786467] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1542.786684] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1542.786915] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89fa8235-3038-4c59-931b-543dad5affe9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.807655] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1542.807655] env[68285]: value = "task-2892806" [ 1542.807655] env[68285]: _type = "Task" [ 1542.807655] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.815798] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892806, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.065280] env[68285]: DEBUG nova.compute.manager [req-bb4067bf-01d1-4d0c-b481-d8049d2b0e7a req-f31b18d5-dc1d-4ae2-8da7-ae940288b6b1 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Received event network-changed-d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1543.065425] env[68285]: DEBUG nova.compute.manager [req-bb4067bf-01d1-4d0c-b481-d8049d2b0e7a req-f31b18d5-dc1d-4ae2-8da7-ae940288b6b1 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Refreshing instance network info cache due to event network-changed-d3493390-fdd6-4fff-a513-fd627306eed0. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1543.065619] env[68285]: DEBUG oslo_concurrency.lockutils [req-bb4067bf-01d1-4d0c-b481-d8049d2b0e7a req-f31b18d5-dc1d-4ae2-8da7-ae940288b6b1 service nova] Acquiring lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.065779] env[68285]: DEBUG oslo_concurrency.lockutils [req-bb4067bf-01d1-4d0c-b481-d8049d2b0e7a req-f31b18d5-dc1d-4ae2-8da7-ae940288b6b1 service nova] Acquired lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1543.065912] env[68285]: DEBUG nova.network.neutron [req-bb4067bf-01d1-4d0c-b481-d8049d2b0e7a req-f31b18d5-dc1d-4ae2-8da7-ae940288b6b1 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Refreshing network info cache for port d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1543.317424] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892806, 'name': CreateVM_Task, 'duration_secs': 0.318064} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.317817] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1543.318227] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.318395] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1543.318743] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1543.319016] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ca26fa3-5dce-44dc-a543-e8a56e8fe76a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.323376] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1543.323376] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52faa7b9-c521-d7c4-11aa-656397d6e31d" [ 1543.323376] env[68285]: _type = "Task" [ 1543.323376] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.332520] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52faa7b9-c521-d7c4-11aa-656397d6e31d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.757240] env[68285]: DEBUG nova.network.neutron [req-bb4067bf-01d1-4d0c-b481-d8049d2b0e7a req-f31b18d5-dc1d-4ae2-8da7-ae940288b6b1 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Updated VIF entry in instance network info cache for port d3493390-fdd6-4fff-a513-fd627306eed0. 
{{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1543.757595] env[68285]: DEBUG nova.network.neutron [req-bb4067bf-01d1-4d0c-b481-d8049d2b0e7a req-f31b18d5-dc1d-4ae2-8da7-ae940288b6b1 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Updating instance_info_cache with network_info: [{"id": "d3493390-fdd6-4fff-a513-fd627306eed0", "address": "fa:16:3e:f6:da:11", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3493390-fd", "ovs_interfaceid": "d3493390-fdd6-4fff-a513-fd627306eed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1543.833801] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52faa7b9-c521-d7c4-11aa-656397d6e31d, 'name': SearchDatastore_Task, 'duration_secs': 0.012659} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.834118] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1543.834355] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Processing image ce84ab4c-9913-42dc-b839-714ad2184867 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1543.834608] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.834758] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1543.834939] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1543.835209] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-445d58e9-214e-4952-a823-4c5af3f35b25 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.844341] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1543.844482] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1543.845226] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-886d43f0-a51f-455f-b5ec-96ec5365e739 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.850483] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1543.850483] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52855a65-9185-6b77-b485-41f3b58f32f5" [ 1543.850483] env[68285]: _type = "Task" [ 1543.850483] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.857969] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52855a65-9185-6b77-b485-41f3b58f32f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.260835] env[68285]: DEBUG oslo_concurrency.lockutils [req-bb4067bf-01d1-4d0c-b481-d8049d2b0e7a req-f31b18d5-dc1d-4ae2-8da7-ae940288b6b1 service nova] Releasing lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1544.362167] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52855a65-9185-6b77-b485-41f3b58f32f5, 'name': SearchDatastore_Task, 'duration_secs': 0.008683} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.362167] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42a8a56f-47ef-4c85-8854-4f7db986b34f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.367089] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1544.367089] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52b3739d-f14c-e7e1-3d7d-d827c97a777b" [ 1544.367089] env[68285]: _type = "Task" [ 1544.367089] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.374598] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b3739d-f14c-e7e1-3d7d-d827c97a777b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.878061] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52b3739d-f14c-e7e1-3d7d-d827c97a777b, 'name': SearchDatastore_Task, 'duration_secs': 0.009659} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.878061] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1544.878061] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a/2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1544.878061] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c70c466b-d8c0-485f-820f-988711a52ba3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.884839] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1544.884839] env[68285]: value = "task-2892807" [ 1544.884839] env[68285]: _type = "Task" [ 1544.884839] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.892162] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892807, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.394931] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892807, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.436169} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.396156] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ce84ab4c-9913-42dc-b839-714ad2184867/ce84ab4c-9913-42dc-b839-714ad2184867.vmdk to [datastore1] 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a/2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1545.396156] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Extending root virtual disk to 1048576 {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1545.396156] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6555051-5092-4bc8-990e-ee3b34dbb56d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.402921] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1545.402921] env[68285]: value = "task-2892808" [ 1545.402921] env[68285]: _type = "Task" [ 1545.402921] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.409811] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892808, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.912934] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892808, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062609} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.913209] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Extended root virtual disk {{(pid=68285) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1545.913933] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88528a8-5c15-4e15-8a4a-ba830599c0eb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.935021] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Reconfiguring VM instance instance-00000080 to attach disk [datastore1] 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a/2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1545.935205] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67286571-cf10-426b-8a8f-1a80ec14b40f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.955134] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1545.955134] env[68285]: value = "task-2892809" [ 1545.955134] env[68285]: _type = "Task" [ 1545.955134] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.962499] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892809, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.465737] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892809, 'name': ReconfigVM_Task, 'duration_secs': 0.255609} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.466151] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Reconfigured VM instance instance-00000080 to attach disk [datastore1] 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a/2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a.vmdk or device None with type sparse {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1546.466589] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba29bf40-0bcf-4309-838b-ea3472bac024 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.473089] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1546.473089] env[68285]: value = "task-2892810" [ 1546.473089] env[68285]: _type = "Task" [ 1546.473089] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.480207] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892810, 'name': Rename_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.983107] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892810, 'name': Rename_Task, 'duration_secs': 0.292275} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.983401] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1546.983646] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ece99014-ee53-4253-8b06-1db11afeae8c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.990600] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1546.990600] env[68285]: value = "task-2892811" [ 1546.990600] env[68285]: _type = "Task" [ 1546.990600] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.997646] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892811, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.500504] env[68285]: DEBUG oslo_vmware.api [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892811, 'name': PowerOnVM_Task, 'duration_secs': 0.440008} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.500770] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1547.500966] env[68285]: INFO nova.compute.manager [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Took 6.60 seconds to spawn the instance on the hypervisor. [ 1547.501162] env[68285]: DEBUG nova.compute.manager [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1547.501894] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69210a9b-2b08-4565-968e-a866e95e3bc0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.017707] env[68285]: INFO nova.compute.manager [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Took 11.28 seconds to build instance. 
[ 1548.157060] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Acquiring lock "7cda8bbd-a75f-4a6a-8905-3f387fcbd624" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1548.157354] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Lock "7cda8bbd-a75f-4a6a-8905-3f387fcbd624" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1548.157568] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Acquiring lock "7cda8bbd-a75f-4a6a-8905-3f387fcbd624-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1548.157751] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Lock "7cda8bbd-a75f-4a6a-8905-3f387fcbd624-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1548.157921] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Lock "7cda8bbd-a75f-4a6a-8905-3f387fcbd624-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1548.160039] env[68285]: INFO nova.compute.manager [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Terminating instance [ 1548.519781] env[68285]: DEBUG oslo_concurrency.lockutils [None req-832df51e-cfcf-4489-b3de-f1b1aca45b79 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.791s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1548.664053] env[68285]: DEBUG nova.compute.manager [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Start destroying the instance on the hypervisor. 
{{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1548.664053] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1548.664990] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e770416-7930-49d4-9dba-103e5a5c8770 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.672490] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1548.672768] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea3d2f89-c98b-4a42-b8dc-2e2f44143d92 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.678166] env[68285]: DEBUG oslo_vmware.api [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Waiting for the task: (returnval){ [ 1548.678166] env[68285]: value = "task-2892812" [ 1548.678166] env[68285]: _type = "Task" [ 1548.678166] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.686540] env[68285]: DEBUG oslo_vmware.api [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': task-2892812, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.036192] env[68285]: DEBUG nova.compute.manager [req-6bb1f007-9625-49b4-b90c-a5288f0be471 req-38306217-ab59-427a-80ff-c9b996181bf3 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Received event network-changed-d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1549.036192] env[68285]: DEBUG nova.compute.manager [req-6bb1f007-9625-49b4-b90c-a5288f0be471 req-38306217-ab59-427a-80ff-c9b996181bf3 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Refreshing instance network info cache due to event network-changed-d3493390-fdd6-4fff-a513-fd627306eed0. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1549.036192] env[68285]: DEBUG oslo_concurrency.lockutils [req-6bb1f007-9625-49b4-b90c-a5288f0be471 req-38306217-ab59-427a-80ff-c9b996181bf3 service nova] Acquiring lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.036192] env[68285]: DEBUG oslo_concurrency.lockutils [req-6bb1f007-9625-49b4-b90c-a5288f0be471 req-38306217-ab59-427a-80ff-c9b996181bf3 service nova] Acquired lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1549.036192] env[68285]: DEBUG nova.network.neutron [req-6bb1f007-9625-49b4-b90c-a5288f0be471 req-38306217-ab59-427a-80ff-c9b996181bf3 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Refreshing network info cache for port d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1549.187475] env[68285]: DEBUG oslo_vmware.api [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': task-2892812, 'name': PowerOffVM_Task, 'duration_secs': 0.182235} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.187739] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1549.187905] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1549.188170] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5f2cd54-c955-4f02-8a99-bef5894c5447 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.249584] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1549.249776] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1549.249923] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Deleting the datastore file [datastore2] 7cda8bbd-a75f-4a6a-8905-3f387fcbd624 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1549.250212] env[68285]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58b35ed1-afec-4271-8fbf-5975a3ec4193 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.257226] env[68285]: DEBUG oslo_vmware.api [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Waiting for the task: (returnval){ [ 1549.257226] env[68285]: value = "task-2892814" [ 1549.257226] env[68285]: _type = "Task" [ 1549.257226] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.264769] env[68285]: DEBUG oslo_vmware.api [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': task-2892814, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.728602] env[68285]: DEBUG nova.network.neutron [req-6bb1f007-9625-49b4-b90c-a5288f0be471 req-38306217-ab59-427a-80ff-c9b996181bf3 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Updated VIF entry in instance network info cache for port d3493390-fdd6-4fff-a513-fd627306eed0. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1549.728975] env[68285]: DEBUG nova.network.neutron [req-6bb1f007-9625-49b4-b90c-a5288f0be471 req-38306217-ab59-427a-80ff-c9b996181bf3 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Updating instance_info_cache with network_info: [{"id": "d3493390-fdd6-4fff-a513-fd627306eed0", "address": "fa:16:3e:f6:da:11", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3493390-fd", "ovs_interfaceid": "d3493390-fdd6-4fff-a513-fd627306eed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.766836] env[68285]: DEBUG oslo_vmware.api [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Task: {'id': task-2892814, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148254} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.767057] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1549.767245] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1549.767421] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1549.767588] env[68285]: INFO nova.compute.manager [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1549.767828] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1549.768017] env[68285]: DEBUG nova.compute.manager [-] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1549.768118] env[68285]: DEBUG nova.network.neutron [-] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1550.231857] env[68285]: DEBUG oslo_concurrency.lockutils [req-6bb1f007-9625-49b4-b90c-a5288f0be471 req-38306217-ab59-427a-80ff-c9b996181bf3 service nova] Releasing lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1550.705475] env[68285]: DEBUG nova.network.neutron [-] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1551.061268] env[68285]: DEBUG nova.compute.manager [req-8afc9f68-2865-42b5-bd43-df10711ae5d4 req-8a503d92-f3dd-4a77-b101-7604a68311e1 service nova] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Received event network-vif-deleted-3c86daf5-dd08-47db-a3c9-f356b57b59c3 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1551.208770] env[68285]: INFO nova.compute.manager [-] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Took 1.44 seconds to deallocate network for instance. 
[ 1551.715885] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1551.716179] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1551.716401] env[68285]: DEBUG nova.objects.instance [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Lazy-loading 'resources' on Instance uuid 7cda8bbd-a75f-4a6a-8905-3f387fcbd624 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1552.271114] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f55451c-4c08-4ea5-a133-54f1b1f12c89 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.278408] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae4b290-df7b-4848-986e-8795bff564c2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.308599] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d1a27b-ef8c-461f-b89e-42f1d66accb1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.315534] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2303da51-932d-4404-a319-547f21031a3b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.328327] env[68285]: DEBUG nova.compute.provider_tree [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1552.831811] env[68285]: DEBUG nova.scheduler.client.report [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1553.336514] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.620s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1553.354268] env[68285]: INFO nova.scheduler.client.report [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Deleted allocations for instance 7cda8bbd-a75f-4a6a-8905-3f387fcbd624 [ 1553.862455] env[68285]: DEBUG oslo_concurrency.lockutils [None req-39d76453-914c-49fe-aded-0d822c960c68 tempest-ServersTestJSON-1426161620 tempest-ServersTestJSON-1426161620-project-member] Lock "7cda8bbd-a75f-4a6a-8905-3f387fcbd624" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.705s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1557.878763] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cf3a5efd-39e4-4c6f-8224-ffb9802d22e9 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "9583f10c-00be-4712-8018-04d642c9c597" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1557.879206] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cf3a5efd-39e4-4c6f-8224-ffb9802d22e9 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "9583f10c-00be-4712-8018-04d642c9c597" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1557.879299] env[68285]: DEBUG nova.compute.manager [None req-cf3a5efd-39e4-4c6f-8224-ffb9802d22e9 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1557.880194] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26defb4e-585c-46cc-8fca-4e38bc6061e0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.886831] env[68285]: DEBUG nova.compute.manager [None req-cf3a5efd-39e4-4c6f-8224-ffb9802d22e9 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68285) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1557.887386] env[68285]: DEBUG nova.objects.instance [None req-cf3a5efd-39e4-4c6f-8224-ffb9802d22e9 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lazy-loading 'flavor' on Instance uuid 9583f10c-00be-4712-8018-04d642c9c597 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1558.894541] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf3a5efd-39e4-4c6f-8224-ffb9802d22e9 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Powering off the VM {{(pid=68285) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1558.894926] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59a8a093-8b31-4baa-9220-dcfba2be5c66 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.902901] env[68285]: DEBUG oslo_vmware.api [None req-cf3a5efd-39e4-4c6f-8224-ffb9802d22e9 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1558.902901] env[68285]: value = "task-2892815" [ 1558.902901] env[68285]: _type = "Task" [ 1558.902901] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.912173] env[68285]: DEBUG oslo_vmware.api [None req-cf3a5efd-39e4-4c6f-8224-ffb9802d22e9 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892815, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.414366] env[68285]: DEBUG oslo_vmware.api [None req-cf3a5efd-39e4-4c6f-8224-ffb9802d22e9 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892815, 'name': PowerOffVM_Task, 'duration_secs': 0.183719} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.414606] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf3a5efd-39e4-4c6f-8224-ffb9802d22e9 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1559.414829] env[68285]: DEBUG nova.compute.manager [None req-cf3a5efd-39e4-4c6f-8224-ffb9802d22e9 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1559.415598] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32c67eb-28e8-4369-9f1d-c74fb51251df {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.926094] env[68285]: DEBUG oslo_concurrency.lockutils [None req-cf3a5efd-39e4-4c6f-8224-ffb9802d22e9 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "9583f10c-00be-4712-8018-04d642c9c597" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.047s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1560.234995] env[68285]: DEBUG nova.objects.instance [None req-8291f62f-301b-40c4-b169-cdba1ce9588b tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lazy-loading 'flavor' on Instance uuid 9583f10c-00be-4712-8018-04d642c9c597 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1560.739598] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8291f62f-301b-40c4-b169-cdba1ce9588b tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock 
"refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.739758] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8291f62f-301b-40c4-b169-cdba1ce9588b tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1560.739936] env[68285]: DEBUG nova.network.neutron [None req-8291f62f-301b-40c4-b169-cdba1ce9588b tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1560.740130] env[68285]: DEBUG nova.objects.instance [None req-8291f62f-301b-40c4-b169-cdba1ce9588b tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lazy-loading 'info_cache' on Instance uuid 9583f10c-00be-4712-8018-04d642c9c597 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1561.243618] env[68285]: DEBUG nova.objects.base [None req-8291f62f-301b-40c4-b169-cdba1ce9588b tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Object Instance<9583f10c-00be-4712-8018-04d642c9c597> lazy-loaded attributes: flavor,info_cache {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1561.934142] env[68285]: DEBUG nova.network.neutron [None req-8291f62f-301b-40c4-b169-cdba1ce9588b tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Updating instance_info_cache with network_info: [{"id": "b34597a1-042a-4358-9952-2daf4a1a35bb", "address": "fa:16:3e:a8:55:10", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb34597a1-04", "ovs_interfaceid": "b34597a1-042a-4358-9952-2daf4a1a35bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.436605] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8291f62f-301b-40c4-b169-cdba1ce9588b tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1563.443221] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8291f62f-301b-40c4-b169-cdba1ce9588b tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1563.443595] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06eeedcf-78bd-48a0-ab3a-c6c56813d1d4 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.451234] env[68285]: DEBUG oslo_vmware.api [None req-8291f62f-301b-40c4-b169-cdba1ce9588b tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1563.451234] env[68285]: value = "task-2892816" [ 1563.451234] env[68285]: _type = "Task" [ 1563.451234] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.459131] env[68285]: DEBUG oslo_vmware.api [None req-8291f62f-301b-40c4-b169-cdba1ce9588b tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892816, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.960997] env[68285]: DEBUG oslo_vmware.api [None req-8291f62f-301b-40c4-b169-cdba1ce9588b tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892816, 'name': PowerOnVM_Task, 'duration_secs': 0.360963} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.961282] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-8291f62f-301b-40c4-b169-cdba1ce9588b tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1563.961529] env[68285]: DEBUG nova.compute.manager [None req-8291f62f-301b-40c4-b169-cdba1ce9588b tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1563.962306] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b5a67b-e139-4f48-b987-e3ad5b5ab337 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.928420] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58554ff-4ed6-4adc-b0a1-e58cf3570102 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.934915] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-72f61687-cb2a-460f-beba-5a0ac23307db tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Suspending the VM {{(pid=68285) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1564.935151] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-2458364f-ab51-4dbd-9f44-4da31cb0b2aa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.941813] env[68285]: DEBUG oslo_vmware.api [None req-72f61687-cb2a-460f-beba-5a0ac23307db tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1564.941813] env[68285]: value = "task-2892817" [ 1564.941813] env[68285]: _type = "Task" [ 1564.941813] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.949818] env[68285]: DEBUG oslo_vmware.api [None req-72f61687-cb2a-460f-beba-5a0ac23307db tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892817, 'name': SuspendVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.450972] env[68285]: DEBUG oslo_vmware.api [None req-72f61687-cb2a-460f-beba-5a0ac23307db tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892817, 'name': SuspendVM_Task} progress is 75%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.952559] env[68285]: DEBUG oslo_vmware.api [None req-72f61687-cb2a-460f-beba-5a0ac23307db tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892817, 'name': SuspendVM_Task, 'duration_secs': 0.769751} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.952881] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-72f61687-cb2a-460f-beba-5a0ac23307db tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Suspended the VM {{(pid=68285) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1565.953026] env[68285]: DEBUG nova.compute.manager [None req-72f61687-cb2a-460f-beba-5a0ac23307db tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1565.953704] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020b12e4-03be-4347-beb4-649f53106d54 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.264025] env[68285]: INFO nova.compute.manager [None req-8a1250e8-86a9-4571-8dc4-e96a4eb4b53d tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Resuming [ 1567.264628] env[68285]: DEBUG nova.objects.instance [None req-8a1250e8-86a9-4571-8dc4-e96a4eb4b53d tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lazy-loading 'flavor' on Instance uuid 9583f10c-00be-4712-8018-04d642c9c597 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1568.774378] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8a1250e8-86a9-4571-8dc4-e96a4eb4b53d tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1568.774642] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8a1250e8-86a9-4571-8dc4-e96a4eb4b53d tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquired lock "refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1568.774742] env[68285]: DEBUG nova.network.neutron [None req-8a1250e8-86a9-4571-8dc4-e96a4eb4b53d tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1569.468024] env[68285]: DEBUG nova.network.neutron [None req-8a1250e8-86a9-4571-8dc4-e96a4eb4b53d tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Updating instance_info_cache with network_info: [{"id": "b34597a1-042a-4358-9952-2daf4a1a35bb", "address": "fa:16:3e:a8:55:10", "network": {"id": "167ff03a-4c88-46a4-9a11-8e12a0cc72a2", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1358683292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": 
"10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43491d0bdffc49eaaad084f3124cffcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb0e556a-0f69-4a5c-af62-ffc46edb8e63", "external-id": "nsx-vlan-transportzone-136", "segmentation_id": 136, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb34597a1-04", "ovs_interfaceid": "b34597a1-042a-4358-9952-2daf4a1a35bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.970937] env[68285]: DEBUG oslo_concurrency.lockutils [None req-8a1250e8-86a9-4571-8dc4-e96a4eb4b53d tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Releasing lock "refresh_cache-9583f10c-00be-4712-8018-04d642c9c597" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1569.971939] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac26e7b-8c4b-4ca7-8369-f50609fb2a1c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.978462] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8a1250e8-86a9-4571-8dc4-e96a4eb4b53d tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Resuming the VM {{(pid=68285) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1569.978684] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fee7b7ca-52e8-4e61-b89e-f2cb8d427b60 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.985473] env[68285]: DEBUG oslo_vmware.api [None req-8a1250e8-86a9-4571-8dc4-e96a4eb4b53d tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1569.985473] env[68285]: value = "task-2892818" [ 1569.985473] env[68285]: _type = "Task" [ 1569.985473] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.992877] env[68285]: DEBUG oslo_vmware.api [None req-8a1250e8-86a9-4571-8dc4-e96a4eb4b53d tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892818, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.498444] env[68285]: DEBUG oslo_vmware.api [None req-8a1250e8-86a9-4571-8dc4-e96a4eb4b53d tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892818, 'name': PowerOnVM_Task, 'duration_secs': 0.469096} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.498720] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-8a1250e8-86a9-4571-8dc4-e96a4eb4b53d tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Resumed the VM {{(pid=68285) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1570.498922] env[68285]: DEBUG nova.compute.manager [None req-8a1250e8-86a9-4571-8dc4-e96a4eb4b53d tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1570.499720] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e64030-f273-4bf6-b764-e357e3fee2ef {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.921518] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "9583f10c-00be-4712-8018-04d642c9c597" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1571.921905] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "9583f10c-00be-4712-8018-04d642c9c597" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1571.922019] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "9583f10c-00be-4712-8018-04d642c9c597-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1571.922237] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "9583f10c-00be-4712-8018-04d642c9c597-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1571.922423] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "9583f10c-00be-4712-8018-04d642c9c597-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1571.924982] env[68285]: INFO nova.compute.manager [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 
9583f10c-00be-4712-8018-04d642c9c597] Terminating instance [ 1572.428462] env[68285]: DEBUG nova.compute.manager [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1572.428687] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1572.430008] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48ff013-deba-4f35-831f-f4792f766300 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.437752] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1572.437978] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-085f1791-66cd-4941-8572-8df5b7a123b9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.443974] env[68285]: DEBUG oslo_vmware.api [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1572.443974] env[68285]: value = "task-2892819" [ 1572.443974] env[68285]: _type = "Task" [ 1572.443974] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.451755] env[68285]: DEBUG oslo_vmware.api [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892819, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.955631] env[68285]: DEBUG oslo_vmware.api [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892819, 'name': PowerOffVM_Task, 'duration_secs': 0.183414} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.956063] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1572.956133] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1572.956398] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4719d2a5-281a-44c0-b107-78f41828640e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.019199] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1573.019375] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Deleting contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1573.019550] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleting the datastore file [datastore2] 9583f10c-00be-4712-8018-04d642c9c597 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1573.019806] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27078cec-8fde-449f-830c-3cd75751c404 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.025573] env[68285]: DEBUG oslo_vmware.api [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for the task: (returnval){ [ 1573.025573] env[68285]: value = "task-2892821" [ 1573.025573] env[68285]: _type = "Task" [ 1573.025573] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.034050] env[68285]: DEBUG oslo_vmware.api [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892821, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.534389] env[68285]: DEBUG oslo_vmware.api [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Task: {'id': task-2892821, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146385} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.534657] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1573.534887] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Deleted contents of the VM from datastore datastore2 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1573.535065] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1573.535269] env[68285]: INFO nova.compute.manager [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1573.535570] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1573.535788] env[68285]: DEBUG nova.compute.manager [-] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1573.535869] env[68285]: DEBUG nova.network.neutron [-] [instance: 9583f10c-00be-4712-8018-04d642c9c597] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1573.964296] env[68285]: DEBUG nova.compute.manager [req-49a0041e-e93b-47f8-a022-498a3983f264 req-9f1dff8e-0a63-40b3-983c-983a1b6d36d2 service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Received event network-vif-deleted-b34597a1-042a-4358-9952-2daf4a1a35bb {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1573.964519] env[68285]: INFO nova.compute.manager [req-49a0041e-e93b-47f8-a022-498a3983f264 req-9f1dff8e-0a63-40b3-983c-983a1b6d36d2 service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Neutron deleted interface b34597a1-042a-4358-9952-2daf4a1a35bb; detaching it from the instance and deleting it from the info cache [ 1573.964710] env[68285]: DEBUG nova.network.neutron [req-49a0041e-e93b-47f8-a022-498a3983f264 req-9f1dff8e-0a63-40b3-983c-983a1b6d36d2 service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.446491] env[68285]: DEBUG nova.network.neutron [-] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.467623] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04a273f2-f76a-4bf3-928b-2809997d1412 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.477879] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a15953-110b-4cad-83ac-95ff9bfcb629 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.502060] env[68285]: DEBUG nova.compute.manager [req-49a0041e-e93b-47f8-a022-498a3983f264 req-9f1dff8e-0a63-40b3-983c-983a1b6d36d2 service nova] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Detach interface failed, port_id=b34597a1-042a-4358-9952-2daf4a1a35bb, reason: Instance 9583f10c-00be-4712-8018-04d642c9c597 could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1574.949681] env[68285]: INFO nova.compute.manager [-] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Took 1.41 seconds to deallocate network for instance. 
[ 1575.456239] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1575.456594] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1575.456759] env[68285]: DEBUG nova.objects.instance [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lazy-loading 'resources' on Instance uuid 9583f10c-00be-4712-8018-04d642c9c597 {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1576.004997] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abcf6240-1d56-4d28-adaf-cfb4f534cb06 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.012389] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbb05ae-86f7-468a-b0b3-cc842ef53613 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.042351] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3350b88-2a2e-4d22-922c-78191c66265e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.049678] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0cfaa5-888c-4dd2-a651-154a99a5a783 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.062279] env[68285]: DEBUG nova.compute.provider_tree [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1576.565311] env[68285]: DEBUG nova.scheduler.client.report [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1577.070140] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 
tempest-ServerActionsTestJSON-223452269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.613s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1577.090488] env[68285]: INFO nova.scheduler.client.report [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Deleted allocations for instance 9583f10c-00be-4712-8018-04d642c9c597 [ 1577.598829] env[68285]: DEBUG oslo_concurrency.lockutils [None req-f78996a3-65f8-4dcb-9bbb-b532ad06ee75 tempest-ServerActionsTestJSON-223452269 tempest-ServerActionsTestJSON-223452269-project-member] Lock "9583f10c-00be-4712-8018-04d642c9c597" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.677s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1586.411581] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1586.411953] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1586.412017] env[68285]: INFO nova.compute.manager [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Shelving [ 1587.420274] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1587.420647] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79e68d74-8554-480f-95c6-c3c2a0da5702 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.428151] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1587.428151] env[68285]: value = "task-2892822" [ 1587.428151] env[68285]: _type = "Task" [ 1587.428151] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.437592] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892822, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.937818] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892822, 'name': PowerOffVM_Task, 'duration_secs': 0.176133} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.938087] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1587.938854] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebeec2ac-15c4-498e-9b97-b324d757049e {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.956928] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28505f4e-9f5b-4984-83de-d1e6e0f0f6df {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.467123] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Creating Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1588.467507] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bc7c5374-fa2e-425a-8993-e257fcea6060 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.475725] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1588.475725] env[68285]: value = "task-2892823" [ 1588.475725] env[68285]: _type = "Task" [ 1588.475725] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.483348] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892823, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.986076] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892823, 'name': CreateSnapshot_Task, 'duration_secs': 0.420493} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.986348] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Created Snapshot of the VM instance {{(pid=68285) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1588.987073] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58f72df-7ea5-479d-aed5-a83ed18c1b83 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.503408] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Creating linked-clone VM from snapshot {{(pid=68285) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1589.503723] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3c9a2574-36a2-46e4-8cf0-e7cb65a3dc3a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.512367] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1589.512367] env[68285]: value = "task-2892824" [ 1589.512367] env[68285]: _type = "Task" [ 1589.512367] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.519840] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892824, 'name': CloneVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.022682] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892824, 'name': CloneVM_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.523360] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892824, 'name': CloneVM_Task, 'duration_secs': 0.955105} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.523707] env[68285]: INFO nova.virt.vmwareapi.vmops [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Created linked-clone VM from snapshot [ 1590.524354] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3d71d1-c6a1-486d-925b-182f9f2bc098 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.531164] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Uploading image 0b82e540-ae04-4a9b-a308-8d68f85a37cb {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1590.551512] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1590.551512] env[68285]: value = "vm-581123" [ 1590.551512] env[68285]: _type = "VirtualMachine" [ 1590.551512] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1590.551762] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a8b58a0d-a7f1-45c5-9479-f515b5b06465 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.558882] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lease: (returnval){ [ 1590.558882] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d6a195-a620-3603-69a5-60926715f012" [ 1590.558882] env[68285]: _type = "HttpNfcLease" [ 1590.558882] env[68285]: } obtained for exporting VM: (result){ [ 1590.558882] env[68285]: value = "vm-581123" [ 1590.558882] env[68285]: _type = "VirtualMachine" [ 1590.558882] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1590.559171] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the lease: (returnval){ [ 1590.559171] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d6a195-a620-3603-69a5-60926715f012" [ 1590.559171] env[68285]: _type = "HttpNfcLease" [ 1590.559171] env[68285]: } to be ready. {{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1590.565186] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1590.565186] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d6a195-a620-3603-69a5-60926715f012" [ 1590.565186] env[68285]: _type = "HttpNfcLease" [ 1590.565186] env[68285]: } is initializing. 
{{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1591.067381] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1591.067381] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d6a195-a620-3603-69a5-60926715f012" [ 1591.067381] env[68285]: _type = "HttpNfcLease" [ 1591.067381] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1591.067675] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1591.067675] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52d6a195-a620-3603-69a5-60926715f012" [ 1591.067675] env[68285]: _type = "HttpNfcLease" [ 1591.067675] env[68285]: }. {{(pid=68285) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1591.068367] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8792d847-8000-4dd1-95fa-1e7b05035538 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.076366] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ad21a-d01a-a6cd-abdf-8f11a22ac594/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1591.076539] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ad21a-d01a-a6cd-abdf-8f11a22ac594/disk-0.vmdk for reading. 
{{(pid=68285) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1591.161385] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a0afda4f-2176-4f6d-87d7-cd8a52ded1c0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.867098] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1594.866957] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1595.861822] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1595.865461] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.865924] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1597.369599] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1597.369848] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1597.370269] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1597.370569] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68285) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1597.371555] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc5e958-80d0-4afd-b959-b08dbb2b3d68 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.379947] env[68285]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc34f6ac-223e-4d39-83e2-51b199a48988 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.395361] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad49d0eb-b121-4e6b-9beb-951d5ae8754a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.402838] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4ec52f-4e89-4e60-990f-4acd4289e521 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.430660] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180569MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=68285) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1597.430881] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1597.431026] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1597.965204] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ad21a-d01a-a6cd-abdf-8f11a22ac594/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1597.966077] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0208cd1d-e7bb-48c7-831f-0040187d9dd0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.972477] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ad21a-d01a-a6cd-abdf-8f11a22ac594/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1597.972636] env[68285]: ERROR oslo_vmware.rw_handles [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ad21a-d01a-a6cd-abdf-8f11a22ac594/disk-0.vmdk due to incomplete transfer. 
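The entries above trace the shelve image upload: the driver powers off the VM (PowerOffVM_Task), snapshots and linked-clones it, opens an HttpNfcLease export for vm-581123, streams disk-0.vmdk toward Glance while reporting HttpNfcLeaseProgress, and finally releases the lease. A minimal sketch of that task/lease pattern against the public oslo.vmware API — not part of this log, with placeholder vCenter host and credentials, and reusing the "vm-581123" moref value shown above — looks roughly like this:

    # Minimal sketch (illustrative only): driving a vCenter task and an export
    # lease with oslo.vmware, the library emitting the wait_for_task /
    # wait_for_lease_ready entries above. Host and credentials are placeholders;
    # 'vm-581123' is the VirtualMachine moref value seen earlier in this log.
    from oslo_vmware import api
    from oslo_vmware import vim_util

    session = api.VMwareAPISession(
        'vc1.example.test',               # placeholder vCenter host
        'administrator@vsphere.local',    # placeholder user
        'secret',                         # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)

    vm_ref = vim_util.get_moref('vm-581123', 'VirtualMachine')

    # Power off the VM and block until the task completes (PowerOffVM_Task).
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # Open an export lease for the VM, wait for it to become ready
    # (ExportVm / HttpNfcLease), then read the device URLs from the lease info.
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    session.wait_for_lease_ready(lease)
    lease_info = session.invoke_api(
        vim_util, 'get_object_property', session.vim, lease, 'info')
    urls = [dev.url for dev in lease_info.deviceUrl]

Nova's vmwareapi driver reaches roughly the same calls through its session._call_method and _wait_for_task wrappers, which is why each vCenter operation in this log is followed by the oslo_vmware.api wait_for_task / _poll_task (or wait_for_lease_ready / _poll_lease) lines.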
[ 1597.972842] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1583a005-db92-4020-8a9e-7cc9d21a3f22 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.980133] env[68285]: DEBUG oslo_vmware.rw_handles [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ad21a-d01a-a6cd-abdf-8f11a22ac594/disk-0.vmdk. {{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1597.980323] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Uploaded image 0b82e540-ae04-4a9b-a308-8d68f85a37cb to the Glance image server {{(pid=68285) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1597.982813] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Destroying the VM {{(pid=68285) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1597.983070] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-559485ae-b49a-4bd2-8e61-a6417a7f784d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.988242] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1597.988242] env[68285]: value = "task-2892826" [ 1597.988242] env[68285]: _type = "Task" [ 1597.988242] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.996656] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892826, 'name': Destroy_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.497733] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892826, 'name': Destroy_Task, 'duration_secs': 0.337661} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.497997] env[68285]: INFO nova.virt.vmwareapi.vm_util [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Destroyed the VM [ 1598.498257] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Deleting Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1598.498496] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8f9af3b8-2691-47c8-804c-241230cd39a6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.504029] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1598.504029] env[68285]: value = "task-2892827" [ 1598.504029] env[68285]: _type = "Task" [ 1598.504029] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.511178] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892827, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.548429] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Instance 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68285) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1598.548628] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1598.548767] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1598.573904] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c893ac-1831-4a2e-9650-979c485b0237 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.580538] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba76c777-186c-4573-bc97-ada27dbb117d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.609523] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c0ee99-5861-4a66-82a4-9efdbdf51b85 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.616307] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5578222-bbdd-4f13-8cf4-9e4c27599851 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.628986] env[68285]: DEBUG nova.compute.provider_tree [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1599.013173] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892827, 'name': RemoveSnapshot_Task, 'duration_secs': 0.349592} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.013477] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Deleted Snapshot of the VM instance {{(pid=68285) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1599.013707] env[68285]: DEBUG nova.compute.manager [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1599.014463] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf954d0c-5a9d-4121-a26f-85e745c08a97 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.132345] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1599.525685] env[68285]: INFO nova.compute.manager [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Shelve offloading [ 1599.637328] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1599.637499] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.206s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1600.029709] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1600.030047] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-160cb733-75a6-407c-8d38-b5184ff81dff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.038026] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] 
Waiting for the task: (returnval){ [ 1600.038026] env[68285]: value = "task-2892828" [ 1600.038026] env[68285]: _type = "Task" [ 1600.038026] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.045195] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892828, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.548249] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] VM already powered off {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1600.548517] env[68285]: DEBUG nova.compute.manager [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1600.549320] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247038ea-1c53-4aa4-8e8d-eab34fc8f383 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.554770] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.554960] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1600.555160] env[68285]: DEBUG nova.network.neutron [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1600.638323] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1600.638526] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1600.866144] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task 
ComputeManager._reclaim_queued_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1600.866329] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68285) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1601.249981] env[68285]: DEBUG nova.network.neutron [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Updating instance_info_cache with network_info: [{"id": "d3493390-fdd6-4fff-a513-fd627306eed0", "address": "fa:16:3e:f6:da:11", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3493390-fd", "ovs_interfaceid": "d3493390-fdd6-4fff-a513-fd627306eed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.752491] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1601.942384] env[68285]: DEBUG nova.compute.manager [req-067cb22e-144c-47e8-b0b5-e6be70d5790a req-a1c22be5-d03b-4e12-bd4c-d3beefaa5917 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Received event network-vif-unplugged-d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1601.942478] env[68285]: DEBUG oslo_concurrency.lockutils [req-067cb22e-144c-47e8-b0b5-e6be70d5790a req-a1c22be5-d03b-4e12-bd4c-d3beefaa5917 service nova] Acquiring lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1601.942686] env[68285]: DEBUG oslo_concurrency.lockutils [req-067cb22e-144c-47e8-b0b5-e6be70d5790a req-a1c22be5-d03b-4e12-bd4c-d3beefaa5917 service nova] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1601.942852] env[68285]: DEBUG oslo_concurrency.lockutils [req-067cb22e-144c-47e8-b0b5-e6be70d5790a req-a1c22be5-d03b-4e12-bd4c-d3beefaa5917 service nova] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1601.943099] env[68285]: DEBUG nova.compute.manager [req-067cb22e-144c-47e8-b0b5-e6be70d5790a req-a1c22be5-d03b-4e12-bd4c-d3beefaa5917 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] No waiting events found dispatching network-vif-unplugged-d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1601.943292] env[68285]: WARNING nova.compute.manager [req-067cb22e-144c-47e8-b0b5-e6be70d5790a req-a1c22be5-d03b-4e12-bd4c-d3beefaa5917 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Received unexpected event network-vif-unplugged-d3493390-fdd6-4fff-a513-fd627306eed0 for instance with vm_state shelved and task_state shelving_offloading. [ 1602.024915] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1602.025837] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091061cd-d4f4-4a5f-a8b6-37ceef927455 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.033393] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1602.033623] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be5b1a86-8031-4f1f-a5ff-6b71237401b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.100801] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1602.101044] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1602.101211] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleting the datastore file [datastore1] 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a 
{{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1602.101470] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b725af86-e513-4c31-86c1-a82443f277ac {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.107612] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1602.107612] env[68285]: value = "task-2892830" [ 1602.107612] env[68285]: _type = "Task" [ 1602.107612] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.115438] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892830, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.617882] env[68285]: DEBUG oslo_vmware.api [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892830, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139163} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.618247] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1602.618408] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1602.618592] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1602.643806] env[68285]: INFO nova.scheduler.client.report [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleted allocations for instance 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a [ 1603.149208] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1603.149507] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 
tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1603.149730] env[68285]: DEBUG nova.objects.instance [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lazy-loading 'resources' on Instance uuid 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1603.652633] env[68285]: DEBUG nova.objects.instance [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lazy-loading 'numa_topology' on Instance uuid 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1603.868583] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1603.868583] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Cleaning up deleted instances {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1603.967387] env[68285]: DEBUG nova.compute.manager [req-36baa859-7db6-4040-ada4-b614f33f9235 req-36ead630-6d39-4563-9df3-92c0f04ebb44 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Received event network-changed-d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1603.967627] env[68285]: DEBUG nova.compute.manager [req-36baa859-7db6-4040-ada4-b614f33f9235 req-36ead630-6d39-4563-9df3-92c0f04ebb44 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Refreshing instance network info cache due to event network-changed-d3493390-fdd6-4fff-a513-fd627306eed0. 
{{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1603.967760] env[68285]: DEBUG oslo_concurrency.lockutils [req-36baa859-7db6-4040-ada4-b614f33f9235 req-36ead630-6d39-4563-9df3-92c0f04ebb44 service nova] Acquiring lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1603.968098] env[68285]: DEBUG oslo_concurrency.lockutils [req-36baa859-7db6-4040-ada4-b614f33f9235 req-36ead630-6d39-4563-9df3-92c0f04ebb44 service nova] Acquired lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1603.969436] env[68285]: DEBUG nova.network.neutron [req-36baa859-7db6-4040-ada4-b614f33f9235 req-36ead630-6d39-4563-9df3-92c0f04ebb44 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Refreshing network info cache for port d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1604.155036] env[68285]: DEBUG nova.objects.base [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Object Instance<2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a> lazy-loaded attributes: resources,numa_topology {{(pid=68285) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1604.169036] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab16395c-5dcb-4a58-9991-42c064963f6a {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.177095] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293465b2-9974-4c58-9df7-7990717ef8b7 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.205879] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7e2b5e-1135-4798-8000-1e598a09f3b6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.213169] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042b4411-0177-43b6-adcd-a8189c436c3f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.225929] env[68285]: DEBUG nova.compute.provider_tree [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1604.377253] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] There are 26 instances to clean {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1604.377506] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 9583f10c-00be-4712-8018-04d642c9c597] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1604.668053] env[68285]: DEBUG nova.network.neutron 
[req-36baa859-7db6-4040-ada4-b614f33f9235 req-36ead630-6d39-4563-9df3-92c0f04ebb44 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Updated VIF entry in instance network info cache for port d3493390-fdd6-4fff-a513-fd627306eed0. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1604.668415] env[68285]: DEBUG nova.network.neutron [req-36baa859-7db6-4040-ada4-b614f33f9235 req-36ead630-6d39-4563-9df3-92c0f04ebb44 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Updating instance_info_cache with network_info: [{"id": "d3493390-fdd6-4fff-a513-fd627306eed0", "address": "fa:16:3e:f6:da:11", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd3493390-fd", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.728790] env[68285]: DEBUG nova.scheduler.client.report [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1604.880935] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 7cda8bbd-a75f-4a6a-8905-3f387fcbd624] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1605.171014] env[68285]: DEBUG oslo_concurrency.lockutils [req-36baa859-7db6-4040-ada4-b614f33f9235 req-36ead630-6d39-4563-9df3-92c0f04ebb44 service nova] Releasing lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1605.233207] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.084s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1605.383757] env[68285]: DEBUG 
nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: a8a67f90-047d-49ce-8de0-ee3e17998c6b] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1605.741819] env[68285]: DEBUG oslo_concurrency.lockutils [None req-1651128b-4e76-4bdd-a4c1-9ef995d05975 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 19.330s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1605.887236] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 005f8c9a-8327-4c60-a016-0460ca42f65f] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1606.020665] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1606.020935] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1606.021138] env[68285]: INFO nova.compute.manager [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Unshelving [ 1606.390529] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: f8533809-ac64-4a1a-8fa8-45648110932d] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1606.893945] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: f695fd3a-c8f0-4db4-8e7d-1ed0155cff5b] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1607.043032] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1607.043167] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68285) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1607.043360] env[68285]: DEBUG nova.objects.instance [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lazy-loading 'pci_requests' on Instance uuid 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1607.397605] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: f9d35416-1f7f-4bf5-baba-1ce4e7436341] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1607.547245] env[68285]: DEBUG nova.objects.instance [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lazy-loading 'numa_topology' on Instance uuid 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1607.900852] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 1d55a520-481f-4a47-bb06-9e794f9347a7] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1608.049527] env[68285]: INFO nova.compute.claims [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1608.404120] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 6f6037bf-5527-4391-857b-47bc68fb04fc] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1608.907162] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: e449ac04-e05c-4134-95b3-4bbc45fa26e4] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1609.084372] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88d70bd-56f3-4273-837d-6573177be193 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.091646] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe08254-0d9e-44b0-87ca-066a95e02395 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.120608] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930c4491-dd55-46bb-8ae9-64951aa4ee01 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.127286] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be01f23f-7d2e-45df-82bc-c00c83a2cd9d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.139885] env[68285]: DEBUG nova.compute.provider_tree [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 
tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1609.410129] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: c341075b-9d30-45db-9d83-f196bf90ecd3] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1609.642958] env[68285]: DEBUG nova.scheduler.client.report [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1609.914879] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 9ddeb48e-ef72-4e6e-9058-d45ebde7583e] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1610.148079] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.105s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1610.176786] env[68285]: INFO nova.network.neutron [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Updating port d3493390-fdd6-4fff-a513-fd627306eed0 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1610.417809] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 8a598506-724f-48f6-91a8-1e02483e6aab] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1610.921171] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: e3117ede-5d88-4e47-a32f-ea91b1ba83ec] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1611.424481] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 0329a534-0ba1-48df-aa9a-01d50bafab05] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1611.620967] env[68285]: DEBUG nova.compute.manager [req-de45c916-9a1a-41a5-9e7c-b6d1e2ab6c98 req-60d17138-e5f2-416f-9902-624f6a23f59d service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Received event network-vif-plugged-d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1611.621186] env[68285]: DEBUG oslo_concurrency.lockutils [req-de45c916-9a1a-41a5-9e7c-b6d1e2ab6c98 req-60d17138-e5f2-416f-9902-624f6a23f59d service nova] Acquiring lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1611.621394] env[68285]: DEBUG oslo_concurrency.lockutils [req-de45c916-9a1a-41a5-9e7c-b6d1e2ab6c98 req-60d17138-e5f2-416f-9902-624f6a23f59d service nova] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1611.621609] env[68285]: DEBUG oslo_concurrency.lockutils [req-de45c916-9a1a-41a5-9e7c-b6d1e2ab6c98 req-60d17138-e5f2-416f-9902-624f6a23f59d service nova] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1611.621728] env[68285]: DEBUG nova.compute.manager [req-de45c916-9a1a-41a5-9e7c-b6d1e2ab6c98 req-60d17138-e5f2-416f-9902-624f6a23f59d service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] No waiting events found dispatching network-vif-plugged-d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1611.621886] env[68285]: WARNING nova.compute.manager [req-de45c916-9a1a-41a5-9e7c-b6d1e2ab6c98 req-60d17138-e5f2-416f-9902-624f6a23f59d service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Received unexpected event network-vif-plugged-d3493390-fdd6-4fff-a513-fd627306eed0 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1611.706030] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.706030] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1611.706030] env[68285]: DEBUG nova.network.neutron [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Building network info cache for instance {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1611.928113] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: a1dc8c86-523f-4474-9fea-9ccf35a36b3f] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1612.393795] env[68285]: DEBUG nova.network.neutron [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Updating instance_info_cache with network_info: [{"id": "d3493390-fdd6-4fff-a513-fd627306eed0", "address": "fa:16:3e:f6:da:11", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3493390-fd", "ovs_interfaceid": "d3493390-fdd6-4fff-a513-fd627306eed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.430788] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 0d99fb99-977e-4edc-93d8-492d55fd68a7] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1612.896514] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 
tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1612.922090] env[68285]: DEBUG nova.virt.hardware [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T15:48:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='640c86d2191af1ce3d91c6447f868355',container_format='bare',created_at=2025-03-10T16:04:26Z,direct_url=,disk_format='vmdk',id=0b82e540-ae04-4a9b-a308-8d68f85a37cb,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-335432635-shelved',owner='c272180eed81480fabd7e6d4dacc2613',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2025-03-10T16:04:38Z,virtual_size=,visibility=), allow threads: False {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1612.922362] env[68285]: DEBUG nova.virt.hardware [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Flavor limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1612.922536] env[68285]: DEBUG nova.virt.hardware [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Image limits 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1612.922723] env[68285]: DEBUG nova.virt.hardware [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Flavor pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1612.922869] env[68285]: DEBUG nova.virt.hardware [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Image pref 0:0:0 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1612.923023] env[68285]: DEBUG nova.virt.hardware [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68285) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1612.923252] env[68285]: DEBUG nova.virt.hardware [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1612.923416] env[68285]: DEBUG nova.virt.hardware [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 
tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1612.923580] env[68285]: DEBUG nova.virt.hardware [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Got 1 possible topologies {{(pid=68285) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1612.923738] env[68285]: DEBUG nova.virt.hardware [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1612.923908] env[68285]: DEBUG nova.virt.hardware [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68285) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1612.924772] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd96ab7c-eb34-4461-8900-d768cb3e2ae1 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.932263] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39fcdbd4-0ad6-47e3-a711-0fbea6056041 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.936234] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: faf810ae-7823-4115-a709-99dc7c480867] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1612.948402] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:da:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '82ca17df-257e-40e6-9ec9-310ed6f05ccb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3493390-fdd6-4fff-a513-fd627306eed0', 'vif_model': 'vmxnet3'}] {{(pid=68285) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1612.955938] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1612.955938] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Creating VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1612.956135] env[68285]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-60819203-2ca6-4f6c-94c9-c7791791eaa8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.975212] env[68285]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1612.975212] env[68285]: value = "task-2892831" [ 1612.975212] env[68285]: _type = "Task" [ 1612.975212] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.982485] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892831, 'name': CreateVM_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.439668] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 51bdaa10-0cf3-4052-9f5c-7d4dad565fd6] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1613.485128] env[68285]: DEBUG oslo_vmware.api [-] Task: {'id': task-2892831, 'name': CreateVM_Task, 'duration_secs': 0.281718} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.485283] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Created VM on the ESX host {{(pid=68285) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1613.485926] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/0b82e540-ae04-4a9b-a308-8d68f85a37cb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.486109] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/0b82e540-ae04-4a9b-a308-8d68f85a37cb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1613.486503] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/0b82e540-ae04-4a9b-a308-8d68f85a37cb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1613.486753] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b14d5d1b-ddab-494f-b849-4886fbaede07 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.490658] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 
tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1613.490658] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]5230a86a-74c1-0e2b-b2c5-bed95b6540e9" [ 1613.490658] env[68285]: _type = "Task" [ 1613.490658] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.497939] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]5230a86a-74c1-0e2b-b2c5-bed95b6540e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.647058] env[68285]: DEBUG nova.compute.manager [req-13e5d8fa-646b-4497-b689-7ba6a585b877 req-10f85de2-d451-4c60-ba61-31ecc8de6a17 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Received event network-changed-d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1613.647264] env[68285]: DEBUG nova.compute.manager [req-13e5d8fa-646b-4497-b689-7ba6a585b877 req-10f85de2-d451-4c60-ba61-31ecc8de6a17 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Refreshing instance network info cache due to event network-changed-d3493390-fdd6-4fff-a513-fd627306eed0. {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1613.647496] env[68285]: DEBUG oslo_concurrency.lockutils [req-13e5d8fa-646b-4497-b689-7ba6a585b877 req-10f85de2-d451-4c60-ba61-31ecc8de6a17 service nova] Acquiring lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.647658] env[68285]: DEBUG oslo_concurrency.lockutils [req-13e5d8fa-646b-4497-b689-7ba6a585b877 req-10f85de2-d451-4c60-ba61-31ecc8de6a17 service nova] Acquired lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1613.647834] env[68285]: DEBUG nova.network.neutron [req-13e5d8fa-646b-4497-b689-7ba6a585b877 req-10f85de2-d451-4c60-ba61-31ecc8de6a17 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Refreshing network info cache for port d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1613.943043] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 852ab501-00a6-442b-804a-1bbf49a2be8c] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1614.000677] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/0b82e540-ae04-4a9b-a308-8d68f85a37cb" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1614.000889] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 
tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Processing image 0b82e540-ae04-4a9b-a308-8d68f85a37cb {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1614.001143] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/0b82e540-ae04-4a9b-a308-8d68f85a37cb/0b82e540-ae04-4a9b-a308-8d68f85a37cb.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.001294] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/0b82e540-ae04-4a9b-a308-8d68f85a37cb/0b82e540-ae04-4a9b-a308-8d68f85a37cb.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1614.001470] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1614.001710] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e799445d-dc09-4ffc-850d-4ef52c8da492 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.018069] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1614.018245] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68285) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1614.018921] env[68285]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a00ec28a-fe7e-40bb-8194-d59f9b409bd3 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.023868] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1614.023868] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52f33daf-3dcf-a89e-fa95-990a4f36180d" [ 1614.023868] env[68285]: _type = "Task" [ 1614.023868] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.031170] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': session[52410409-0226-2549-941e-c989b8ec60bd]52f33daf-3dcf-a89e-fa95-990a4f36180d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.332773] env[68285]: DEBUG nova.network.neutron [req-13e5d8fa-646b-4497-b689-7ba6a585b877 req-10f85de2-d451-4c60-ba61-31ecc8de6a17 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Updated VIF entry in instance network info cache for port d3493390-fdd6-4fff-a513-fd627306eed0. {{(pid=68285) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1614.333191] env[68285]: DEBUG nova.network.neutron [req-13e5d8fa-646b-4497-b689-7ba6a585b877 req-10f85de2-d451-4c60-ba61-31ecc8de6a17 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Updating instance_info_cache with network_info: [{"id": "d3493390-fdd6-4fff-a513-fd627306eed0", "address": "fa:16:3e:f6:da:11", "network": {"id": "5c780024-3e5f-47af-94ae-4070034a0fa1", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1486874028-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c272180eed81480fabd7e6d4dacc2613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3493390-fd", "ovs_interfaceid": "d3493390-fdd6-4fff-a513-fd627306eed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1614.446722] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: b2199b56-64bd-4096-b877-e10656b09313] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1614.533749] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Preparing fetch location {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1614.533954] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Fetch image to [datastore1] 
OSTACK_IMG_7d111b96-c3d4-4d58-9cd2-4fd715290d15/OSTACK_IMG_7d111b96-c3d4-4d58-9cd2-4fd715290d15.vmdk {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1614.534156] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Downloading stream optimized image 0b82e540-ae04-4a9b-a308-8d68f85a37cb to [datastore1] OSTACK_IMG_7d111b96-c3d4-4d58-9cd2-4fd715290d15/OSTACK_IMG_7d111b96-c3d4-4d58-9cd2-4fd715290d15.vmdk on the data store datastore1 as vApp {{(pid=68285) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1614.534330] env[68285]: DEBUG nova.virt.vmwareapi.images [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Downloading image file data 0b82e540-ae04-4a9b-a308-8d68f85a37cb to the ESX as VM named 'OSTACK_IMG_7d111b96-c3d4-4d58-9cd2-4fd715290d15' {{(pid=68285) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1614.596743] env[68285]: DEBUG oslo_vmware.rw_handles [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1614.596743] env[68285]: value = "resgroup-9" [ 1614.596743] env[68285]: _type = "ResourcePool" [ 1614.596743] env[68285]: }. {{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1614.597025] env[68285]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-171264ef-abf4-49f0-926f-3dc7dbd7bbf2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.617362] env[68285]: DEBUG oslo_vmware.rw_handles [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lease: (returnval){ [ 1614.617362] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52316fc8-d420-8339-33a2-d5d54ff21a6c" [ 1614.617362] env[68285]: _type = "HttpNfcLease" [ 1614.617362] env[68285]: } obtained for vApp import into resource pool (val){ [ 1614.617362] env[68285]: value = "resgroup-9" [ 1614.617362] env[68285]: _type = "ResourcePool" [ 1614.617362] env[68285]: }. {{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1614.617601] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the lease: (returnval){ [ 1614.617601] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52316fc8-d420-8339-33a2-d5d54ff21a6c" [ 1614.617601] env[68285]: _type = "HttpNfcLease" [ 1614.617601] env[68285]: } to be ready. 
{{(pid=68285) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1614.623303] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1614.623303] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52316fc8-d420-8339-33a2-d5d54ff21a6c" [ 1614.623303] env[68285]: _type = "HttpNfcLease" [ 1614.623303] env[68285]: } is initializing. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1614.836339] env[68285]: DEBUG oslo_concurrency.lockutils [req-13e5d8fa-646b-4497-b689-7ba6a585b877 req-10f85de2-d451-4c60-ba61-31ecc8de6a17 service nova] Releasing lock "refresh_cache-2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1614.950326] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: feda1a98-3086-43a6-a887-f4d1602ca8ee] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1615.125283] env[68285]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1615.125283] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52316fc8-d420-8339-33a2-d5d54ff21a6c" [ 1615.125283] env[68285]: _type = "HttpNfcLease" [ 1615.125283] env[68285]: } is ready. {{(pid=68285) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1615.125580] env[68285]: DEBUG oslo_vmware.rw_handles [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1615.125580] env[68285]: value = "session[52410409-0226-2549-941e-c989b8ec60bd]52316fc8-d420-8339-33a2-d5d54ff21a6c" [ 1615.125580] env[68285]: _type = "HttpNfcLease" [ 1615.125580] env[68285]: }. {{(pid=68285) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1615.126294] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4a0e9c-6140-45ba-a82e-fc81ffd6c30d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.132904] env[68285]: DEBUG oslo_vmware.rw_handles [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5259c178-4eaa-1c73-a7ed-d8dc519531fd/disk-0.vmdk from lease info. {{(pid=68285) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1615.133089] env[68285]: DEBUG oslo_vmware.rw_handles [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5259c178-4eaa-1c73-a7ed-d8dc519531fd/disk-0.vmdk. 
{{(pid=68285) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1615.195043] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-dbf3fd7a-6f79-4f27-96c8-57a005c6d8d2 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.453817] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 801f524e-28b5-4452-b880-0fc30d3c5eef] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1615.956732] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: d1446290-95ce-4e87-85df-7cc69bb57ce7] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1616.397024] env[68285]: DEBUG oslo_vmware.rw_handles [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Completed reading data from the image iterator. {{(pid=68285) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1616.397280] env[68285]: DEBUG oslo_vmware.rw_handles [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5259c178-4eaa-1c73-a7ed-d8dc519531fd/disk-0.vmdk. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1616.398239] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a267d79-e89c-4595-b02a-e752e5937c2d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.404524] env[68285]: DEBUG oslo_vmware.rw_handles [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5259c178-4eaa-1c73-a7ed-d8dc519531fd/disk-0.vmdk is in state: ready. {{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1616.404691] env[68285]: DEBUG oslo_vmware.rw_handles [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5259c178-4eaa-1c73-a7ed-d8dc519531fd/disk-0.vmdk. 
{{(pid=68285) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1616.404968] env[68285]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-c59e33c7-e837-47e9-90f6-40bc9f204712 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.461408] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: 3858399e-9fc4-4d60-a9d5-95caefb7bd87] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1616.584295] env[68285]: DEBUG oslo_vmware.rw_handles [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5259c178-4eaa-1c73-a7ed-d8dc519531fd/disk-0.vmdk. {{(pid=68285) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1616.584508] env[68285]: INFO nova.virt.vmwareapi.images [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Downloaded image file data 0b82e540-ae04-4a9b-a308-8d68f85a37cb [ 1616.585344] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b62d85a0-bdd4-43b3-89dd-1004ac2f6de8 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.600076] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19d0f2b0-284a-4316-93ec-ee56a072f1ae {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.628275] env[68285]: INFO nova.virt.vmwareapi.images [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] The imported VM was unregistered [ 1616.630425] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Caching image {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1616.630655] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Creating directory with path [datastore1] devstack-image-cache_base/0b82e540-ae04-4a9b-a308-8d68f85a37cb {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1616.630901] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69299a3d-02f1-47fc-b324-af6e8195effa {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.640406] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Created directory with path [datastore1] 
devstack-image-cache_base/0b82e540-ae04-4a9b-a308-8d68f85a37cb {{(pid=68285) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1616.640582] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_7d111b96-c3d4-4d58-9cd2-4fd715290d15/OSTACK_IMG_7d111b96-c3d4-4d58-9cd2-4fd715290d15.vmdk to [datastore1] devstack-image-cache_base/0b82e540-ae04-4a9b-a308-8d68f85a37cb/0b82e540-ae04-4a9b-a308-8d68f85a37cb.vmdk. {{(pid=68285) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1616.640801] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-5bc14b4b-e5e0-4434-a16f-0630c04de040 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.646816] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1616.646816] env[68285]: value = "task-2892834" [ 1616.646816] env[68285]: _type = "Task" [ 1616.646816] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.654822] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892834, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.965405] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] [instance: d0f6ab86-e18d-42ac-bcf3-94eafb1939ff] Instance has had 0 of 5 cleanup attempts {{(pid=68285) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1617.156526] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892834, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.469101] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1617.469291] env[68285]: DEBUG nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Cleaning up deleted instances with incomplete migration {{(pid=68285) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1617.657085] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892834, 'name': MoveVirtualDisk_Task} progress is 46%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.972414] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1618.157724] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892834, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.659481] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892834, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.159092] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892834, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.187505} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.159468] env[68285]: INFO nova.virt.vmwareapi.ds_util [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_7d111b96-c3d4-4d58-9cd2-4fd715290d15/OSTACK_IMG_7d111b96-c3d4-4d58-9cd2-4fd715290d15.vmdk to [datastore1] devstack-image-cache_base/0b82e540-ae04-4a9b-a308-8d68f85a37cb/0b82e540-ae04-4a9b-a308-8d68f85a37cb.vmdk. [ 1619.159614] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Cleaning up location [datastore1] OSTACK_IMG_7d111b96-c3d4-4d58-9cd2-4fd715290d15 {{(pid=68285) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1619.159798] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_7d111b96-c3d4-4d58-9cd2-4fd715290d15 {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1619.160084] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a0f05ad-69f5-41ad-9ba6-9659000b1ad0 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.166786] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1619.166786] env[68285]: value = "task-2892835" [ 1619.166786] env[68285]: _type = "Task" [ 1619.166786] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.174414] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892835, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.676264] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892835, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.048481} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.676598] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1619.676736] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/0b82e540-ae04-4a9b-a308-8d68f85a37cb/0b82e540-ae04-4a9b-a308-8d68f85a37cb.vmdk" {{(pid=68285) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1619.676975] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/0b82e540-ae04-4a9b-a308-8d68f85a37cb/0b82e540-ae04-4a9b-a308-8d68f85a37cb.vmdk to [datastore1] 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a/2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1619.677240] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb42647e-9c6b-4fa9-a1c1-09a033b7d63f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.683992] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1619.683992] env[68285]: value = "task-2892836" [ 1619.683992] env[68285]: _type = "Task" [ 1619.683992] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.691068] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892836, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.194369] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892836, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.695278] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892836, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.196704] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892836, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.697931] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892836, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.198447] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892836, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.430806} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.198790] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/0b82e540-ae04-4a9b-a308-8d68f85a37cb/0b82e540-ae04-4a9b-a308-8d68f85a37cb.vmdk to [datastore1] 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a/2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a.vmdk {{(pid=68285) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1622.199410] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db8b165-8954-4e98-a74d-91733f2404ff {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.220327] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Reconfiguring VM instance instance-00000080 to attach disk [datastore1] 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a/2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a.vmdk or device None with type streamOptimized {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1622.220551] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-655e26ba-bca4-45eb-b574-e386020d8dee {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.238395] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1622.238395] env[68285]: value = "task-2892837" [ 1622.238395] env[68285]: _type = "Task" [ 1622.238395] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.245450] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892837, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.750607] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892837, 'name': ReconfigVM_Task, 'duration_secs': 0.310881} completed successfully. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.750953] env[68285]: DEBUG nova.virt.vmwareapi.volumeops [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Reconfigured VM instance instance-00000080 to attach disk [datastore1] 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a/2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a.vmdk or device None with type streamOptimized {{(pid=68285) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1622.751766] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79f79a58-e9c6-4a3f-a803-9b3edd30635f {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.758995] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1622.758995] env[68285]: value = "task-2892838" [ 1622.758995] env[68285]: _type = "Task" [ 1622.758995] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.772050] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892838, 'name': Rename_Task} progress is 6%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.269421] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892838, 'name': Rename_Task, 'duration_secs': 0.35901} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.269805] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Powering on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1623.270020] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c0a5964-1e56-4280-b544-83f4aad18d0c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.276468] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1623.276468] env[68285]: value = "task-2892839" [ 1623.276468] env[68285]: _type = "Task" [ 1623.276468] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.284395] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892839, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.790449] env[68285]: DEBUG oslo_vmware.api [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892839, 'name': PowerOnVM_Task, 'duration_secs': 0.43896} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.790449] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Powered on the VM {{(pid=68285) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1623.894100] env[68285]: DEBUG nova.compute.manager [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Checking state {{(pid=68285) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1623.895031] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e361b6fb-1860-4749-afbb-4c3012b139a9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.412635] env[68285]: DEBUG oslo_concurrency.lockutils [None req-c65669e4-9bc4-4bbd-bc20-f095333747d5 tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.391s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1661.694833] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1661.695263] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1661.695383] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1661.695612] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 
tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1661.695797] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1661.699574] env[68285]: INFO nova.compute.manager [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Terminating instance [ 1662.203682] env[68285]: DEBUG nova.compute.manager [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Start destroying the instance on the hypervisor. {{(pid=68285) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1662.204014] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Destroying instance {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1662.205387] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7c8aa7-a07b-4299-a754-1cef32a3620b {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.213641] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Powering off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1662.213861] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac1fd5a9-93eb-436d-ad57-cabf03affb10 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.221693] env[68285]: DEBUG oslo_vmware.api [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1662.221693] env[68285]: value = "task-2892840" [ 1662.221693] env[68285]: _type = "Task" [ 1662.221693] env[68285]: } to complete. {{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.229352] env[68285]: DEBUG oslo_vmware.api [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892840, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.731077] env[68285]: DEBUG oslo_vmware.api [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892840, 'name': PowerOffVM_Task, 'duration_secs': 0.181053} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.731470] env[68285]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Powered off the VM {{(pid=68285) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1662.731470] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Unregistering the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1662.732130] env[68285]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef8bc3f9-d038-4559-8938-d3d401ce7d72 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.801437] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Unregistered the VM {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1662.801800] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Deleting contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1662.802066] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleting the datastore file [datastore1] 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1662.802354] env[68285]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1e83b79-f906-449d-84a6-a75a3a5857da {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.810450] env[68285]: DEBUG oslo_vmware.api [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for the task: (returnval){ [ 1662.810450] env[68285]: value = "task-2892842" [ 1662.810450] env[68285]: _type = "Task" [ 1662.810450] env[68285]: } to complete. 
{{(pid=68285) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.818769] env[68285]: DEBUG oslo_vmware.api [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892842, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.323882] env[68285]: DEBUG oslo_vmware.api [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Task: {'id': task-2892842, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142956} completed successfully. {{(pid=68285) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.324294] env[68285]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleted the datastore file {{(pid=68285) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1663.324605] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Deleted contents of the VM from datastore datastore1 {{(pid=68285) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1663.324914] env[68285]: DEBUG nova.virt.vmwareapi.vmops [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Instance destroyed {{(pid=68285) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1663.325217] env[68285]: INFO nova.compute.manager [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1663.325637] env[68285]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68285) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1663.325944] env[68285]: DEBUG nova.compute.manager [-] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Deallocating network for instance {{(pid=68285) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1663.326141] env[68285]: DEBUG nova.network.neutron [-] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] deallocate_for_instance() {{(pid=68285) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1663.912867] env[68285]: DEBUG nova.compute.manager [req-2ba862ef-27dd-47e2-9627-e116f8ab1c56 req-d503edb0-7ee7-4cd0-8933-d3d1edcf14c1 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Received event network-vif-deleted-d3493390-fdd6-4fff-a513-fd627306eed0 {{(pid=68285) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1663.913113] env[68285]: INFO nova.compute.manager [req-2ba862ef-27dd-47e2-9627-e116f8ab1c56 req-d503edb0-7ee7-4cd0-8933-d3d1edcf14c1 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Neutron deleted interface d3493390-fdd6-4fff-a513-fd627306eed0; detaching it from the instance and deleting it from the info cache [ 1663.913113] env[68285]: DEBUG nova.network.neutron [req-2ba862ef-27dd-47e2-9627-e116f8ab1c56 req-d503edb0-7ee7-4cd0-8933-d3d1edcf14c1 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.372738] env[68285]: DEBUG nova.network.neutron [-] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Updating instance_info_cache with network_info: [] {{(pid=68285) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.418031] env[68285]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-890f24e9-3915-49f2-ab06-169cc88ef4bc {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.426275] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e8c11d-c75f-4637-ad16-230504834d02 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.449848] env[68285]: DEBUG nova.compute.manager [req-2ba862ef-27dd-47e2-9627-e116f8ab1c56 req-d503edb0-7ee7-4cd0-8933-d3d1edcf14c1 service nova] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Detach interface failed, port_id=d3493390-fdd6-4fff-a513-fd627306eed0, reason: Instance 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a could not be found. {{(pid=68285) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1664.876281] env[68285]: INFO nova.compute.manager [-] [instance: 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a] Took 1.55 seconds to deallocate network for instance. 
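The task records above (DeleteDatastoreFile_Task, CopyVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task) all follow oslo.vmware's pattern: each vSphere mutation returns a Task reference, and VMwareAPISession.wait_for_task() polls it until completion, which is what produces the wait_for_task/_poll_task progress lines. A minimal sketch of that call pattern follows; the vCenter host, credentials, poll interval and datastore paths are placeholders (not values from this run), and the datacenter references and disk spec that the real Nova copy passes are omitted for brevity.

    # A minimal sketch, assuming placeholder endpoint/credentials; not the Nova source.
    # It illustrates the invoke_api()/wait_for_task() pattern seen in the records above.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test',      # vCenter host (placeholder)
        'administrator',        # server_username (placeholder)
        'secret',               # server_password (placeholder)
        10,                     # api_retry_count
        0.5)                    # task_poll_interval, seconds between poll calls

    # CopyVirtualDisk_Task returns a Task moref immediately; wait_for_task()
    # then polls its progress (the 0%..100% lines) and raises if the task fails.
    # The real Nova call also supplies sourceDatacenter/destDatacenter and a
    # destination disk spec; they are left out of this sketch.
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', vdm,
        sourceName='[datastore1] image-cache/base.vmdk',   # placeholder path
        destName='[datastore1] instance-dir/disk.vmdk')    # placeholder path
    session.wait_for_task(task)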
[ 1665.382628] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1665.382968] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1665.383144] env[68285]: DEBUG nova.objects.instance [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lazy-loading 'resources' on Instance uuid 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a {{(pid=68285) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1665.918830] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8cbfefe-6f8c-4b6b-abb8-0610d2221ce5 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.925636] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f22d53f-ce35-4806-9dbf-16a6aacf9f92 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.956738] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae25dab-1809-4b07-b782-9922a27b20f9 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.963776] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e084c1e7-398b-447e-88da-be01aa348a9d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.976950] env[68285]: DEBUG nova.compute.provider_tree [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1666.479796] env[68285]: DEBUG nova.scheduler.client.report [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1666.984814] env[68285]: DEBUG oslo_concurrency.lockutils [None 
req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.602s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1667.005380] env[68285]: INFO nova.scheduler.client.report [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Deleted allocations for instance 2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a [ 1667.474311] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.474558] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.512136] env[68285]: DEBUG oslo_concurrency.lockutils [None req-a8864403-f525-44d8-bb86-30a44799f6ab tempest-AttachVolumeShelveTestJSON-1141669464 tempest-AttachVolumeShelveTestJSON-1141669464-project-member] Lock "2a447bab-9d72-4b8d-8e2e-e03e3a2e0b9a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.817s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1667.978674] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.978888] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.979069] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.979239] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.979427] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.979543] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.979678] env[68285]: DEBUG 
nova.compute.manager [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68285) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1667.979823] env[68285]: DEBUG oslo_service.periodic_task [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68285) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1668.482396] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1668.482627] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1668.482895] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1668.483107] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68285) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1668.484042] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3def995c-b3f4-4bc8-9a6a-d1b981578da6 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.492645] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914044bf-a4be-434c-ada0-2acb80539009 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.507088] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e3ce4e-7dc9-4893-9997-9eae88e70577 {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.512743] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6824c797-0c88-43c9-a4a7-bf3884ba991c {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.542309] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181074MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=68285) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1668.542514] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1668.542747] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1669.562409] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1669.562651] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68285) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1669.578042] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Refreshing inventories for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1669.590507] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Updating ProviderTree inventory for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1669.590691] env[68285]: DEBUG nova.compute.provider_tree [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Updating inventory in ProviderTree for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1669.600426] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Refreshing aggregate associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, aggregates: None {{(pid=68285) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1669.616348] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Refreshing trait associations for resource provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=68285) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1669.627430] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b372a5c-bfba-4edb-b37f-59ad73be6abf {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.634295] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fedc96d3-ce4e-45d5-bf11-444df00d04db {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.662919] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e88aba-9f40-459f-b35d-850df23d41fb {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.669345] env[68285]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919227c1-2a8b-45e1-b436-75d8a2aee62d {{(pid=68285) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.681521] env[68285]: DEBUG nova.compute.provider_tree [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed in ProviderTree for provider: 7bdf675d-15ae-4a4b-9c03-79d8c773b76b {{(pid=68285) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1670.184403] env[68285]: DEBUG nova.scheduler.client.report [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Inventory has not changed for provider 7bdf675d-15ae-4a4b-9c03-79d8c773b76b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68285) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1670.690213] env[68285]: DEBUG nova.compute.resource_tracker [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68285) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1670.690589] env[68285]: DEBUG oslo_concurrency.lockutils [None req-06584ce2-d677-4e40-a33d-d1815d788d27 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.148s {{(pid=68285) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
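The "compute_resources" lock records in this final span come from oslo.concurrency: the ResourceTracker serializes usage updates and the periodic update_available_resource pass behind one named lock, and lockutils logs the waited/held durations for each acquisition. A hedged sketch of the two usual forms follows; the function names and bodies are placeholders, not Nova's actual methods.

    # A minimal sketch, assuming placeholder function bodies; it mirrors how the
    # "compute_resources" lock records above are produced by oslo.concurrency.
    from oslo_concurrency import lockutils

    # Nova builds its decorator with a 'nova-' lock-file prefix.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def update_usage(context, instance):
        # Placeholder body: the decorated ResourceTracker methods adjust the
        # in-memory usage totals and report inventory to Placement.  The
        # decorator emits the 'Acquiring lock ... by ...', 'acquired ... waited'
        # and '"released" ... held' DEBUG records with their timings.
        pass

    # The context-manager form is used for ad-hoc named locks, for example the
    # per-image datastore cache path locked earlier in this log.
    def copy_cached_image(path):
        with lockutils.lock(path):
            pass  # placeholder: copy or delete files while holding the lock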